Skip to content

Commit

Permalink
Release 1.7.1, Merge pull request #76 from sentinel-hub/develop
Browse files Browse the repository at this point in the history
Release 1.7.1
  • Loading branch information
zigaLuksic authored Jun 1, 2023
2 parents 7155920 + ec2e12b commit 39de435
Show file tree
Hide file tree
Showing 17 changed files with 268 additions and 374 deletions.
8 changes: 0 additions & 8 deletions .flake8

This file was deleted.

1 change: 1 addition & 0 deletions .github/workflows/ci_action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ jobs:
python-version:
- "3.9"
- "3.10"
- "3.11"
include:
# A flag marks whether full or partial tests should be run
# We don't run integration tests on pull requests from outside repos, because they don't have secrets
Expand Down
35 changes: 6 additions & 29 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,41 +13,18 @@ repos:
- id: debug-statements

- repo: https://github.com/psf/black
rev: 22.12.0
rev: 23.1.0
hooks:
- id: black
language_version: python3

- repo: https://github.com/pycqa/isort
rev: 5.12.0
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: "v0.0.269"
hooks:
- id: isort
name: isort (python)

- repo: https://github.com/PyCQA/autoflake
rev: v2.0.0
hooks:
- id: autoflake
args:
[
--remove-all-unused-imports,
--in-place,
--ignore-init-module-imports,
]

- repo: https://github.com/pycqa/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies:
- flake8-bugbear
- flake8-comprehensions
- flake8-simplify
- flake8-typing-imports
- id: ruff

- repo: https://github.com/nbQA-dev/nbQA
rev: 1.6.1
rev: 1.6.3
hooks:
- id: nbqa-black
- id: nbqa-isort
- id: nbqa-flake8
- id: nbqa-ruff
7 changes: 0 additions & 7 deletions MANIFEST.in

This file was deleted.

2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ The **s2cloudless** algorithm was part of an international collaborative effort

## Installation

The package requires a Python version >= 3.7. The package is available on
The package requires a Python version >= 3.8. The package is available on
the PyPI package manager and can be installed with

```
Expand Down
84 changes: 49 additions & 35 deletions examples/sentinel2-cloud-detector-example.ipynb

Large diffs are not rendered by default.

156 changes: 137 additions & 19 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,18 +1,142 @@
[build-system]
requires = ['hatchling']
build-backend = 'hatchling.build'

[tool.hatch.version]
path = 's2cloudless/__init__.py'

[tool.hatch.build.targets.sdist]
include = ['/README.md', '/LICENSE.md', '/s2cloudless']

[project]
name = "s2cloudless"
dynamic = ["version"]
description = "Sentinel Hub's cloud detector for Sentinel-2 imagery"
readme = "README.md"
requires-python = ">= 3.8"
license = { file = "LICENSE.md" }
authors = [
{ name = "Sinergise EO research team", email = "[email protected]" },
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Topic :: Scientific/Engineering",
]
dependencies = [
"lightgbm>=2.0.11",
"numpy>=1.13.3",
"opencv-python-headless",
"sentinelhub>=3.9.0",
"typing_extensions",
]

[project.optional-dependencies]
dev = [
"build",
"mypy",
"pre-commit",
"pylint>=2.14.0",
"pytest>=3.0.0",
"pytest-cov",
"twine",
]

[project.urls]
Homepage = "https://github.com/sentinel-hub/sentinel2-cloud-detector"
Issues = "https://github.com/sentinel-hub/sentinel2-cloud-detector/issues"
Source = "https://github.com/sentinel-hub/sentinel2-cloud-detector"
Forum = "https://forum.sentinel-hub.com"

[tool.black]
line-length = 120
preview = true

[tool.isort]
profile = "black"
known_first_party = "sentinelhub"
known_absolute = "s2cloudless"
sections = ["FUTURE","STDLIB","THIRDPARTY","FIRSTPARTY","ABSOLUTE","LOCALFOLDER"]
line_length = 120
[tool.ruff]
line-length = 120
target-version = "py38"
select = [
"F", # pyflakes
"E", # pycodestyle
"W", # pycodestyle
"C90", # mccabe
"N", # naming
"YTT", # flake8-2020
"B", # bugbear
"A", # built-ins
"COM", # commas
"C4", # comprehensions
"T10", # debugger statements
"ISC", # implicit string concatenation
"ICN", # import conventions
"G", # logging format
"PIE", # flake8-pie
"T20", # print statements
"PT", # pytest style
"RET", # returns
"SLF", # private member access
"SIM", # simplifications
"ARG", # unused arguments
"PD", # pandas
"PGH", # pygrep hooks (useless noqa comments, eval statements etc.)
"FLY", # flynt
"RUF", # ruff rules
"NPY", # numpy
"I", # isort
"UP", # pyupgrade
"FA", # checks where future import of annotations would make types nicer
]
fix = true
fixable = [
"I", # sort imports
"F401", # remove redundant imports
"UP007", # use new-style union type annotations
"UP006", # use new-style built-in type annotations
"UP037", # remove quotes around types when not necessary
"FA100", # import future annotations where necessary (not autofixable ATM)
]
ignore = [
"SIM108", # tries to aggressively inline `if`, not always readable
"COM812", # trailing comma missing, fights with black
"PD011", # suggests `.to_numpy` instead of `.values`, also does this for non-pandas objects...
# potentially fixable
"PT011", # complains for `pytest.raises(ValueError)` but we use it a lot
"N803", # clashes with the default naming of model protocols
]
per-file-ignores = { "__init__.py" = ["F401"] }
exclude = [".git", "__pycache__", "build", "dist"]

[tool.nbqa.addopts]
flake8 = [
"--extend-ignore=E402"

[tool.ruff.isort]
section-order = [
"future",
"standard-library",
"third-party",
"our-packages",
"first-party",
"local-folder",
]
known-first-party = ["s2cloudless"]
sections = { our-packages = ["sentinelhub"] }

[tool.nbqa.addopts]
ruff = ["--extend-ignore=E402,T201,B015,B018,NPY002,UP,FA"]
# E402 -> imports on top
# T201 -> print found
# B015 & B018 -> useless expression (used to show values in ipynb)
# NPY002 -> use RNG instead of old numpy.random
# UP -> suggestions for new-style classes (future import might confuse readers)
# FA -> necessary future annotations import

[tool.pylint.format]
max-line-length = 120
Expand All @@ -25,27 +149,21 @@ disable = [
"unsubscriptable-object",
"invalid-unary-operand-type",
"unspecified-encoding",
"unnecessary-ellipsis"
"unnecessary-ellipsis",
]

[tool.pylint.design]
max-args = 10
max-attributes = 20

[tool.pytest.ini_options]
markers = [
"sh_integration: marks integration tests with Sentinel Hub service"
]
markers = ["sh_integration: marks integration tests with Sentinel Hub service"]

[tool.coverage.run]
source = [
"s2cloudless"
]
source = ["s2cloudless"]

[tool.coverage.report]
omit = [
"models/*"
]
omit = ["models/*"]

[tool.mypy]
follow_imports = "normal"
Expand Down
7 changes: 0 additions & 7 deletions requirements-dev.txt

This file was deleted.

6 changes: 0 additions & 6 deletions requirements.txt

This file was deleted.

4 changes: 2 additions & 2 deletions s2cloudless/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@

from .cloud_detector import S2PixelCloudDetector
from .pixel_classifier import PixelClassifier
from .utils import download_bands_and_valid_data_mask, get_s2_evalscript, get_timestamps
from .utils import download_bands_and_valid_data_mask

__version__ = "1.7.0"
__version__ = "1.7.1"
35 changes: 20 additions & 15 deletions s2cloudless/cloud_detector.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
"""Module for pixel-based classification on Sentinel-2 L1C imagery."""
from __future__ import annotations

import os
from typing import Any, Optional
from typing import Any

import cv2
import numpy as np
from lightgbm import Booster
from scipy.ndimage import convolve
from skimage.morphology import dilation, disk

from .pixel_classifier import PixelClassifier
from .utils import MODEL_BAND_IDS
from .utils import MODEL_BAND_IDS, cv2_disk

MODEL_FILENAME = "pixel_s2_cloud_detector_lightGBM_v0.1.txt"

Expand Down Expand Up @@ -42,9 +43,9 @@ def __init__(
self,
threshold: float = 0.4,
all_bands: bool = False,
average_over: Optional[int] = 1,
dilation_size: Optional[int] = 1,
model_filename: Optional[str] = None,
average_over: int | None = 1,
dilation_size: int | None = 1,
model_filename: str | None = None,
):
self.threshold = threshold
self.all_bands = all_bands
Expand All @@ -56,13 +57,14 @@ def __init__(
model_filename = os.path.join(package_dir, "models", MODEL_FILENAME)
self.model_filename = model_filename

self._classifier: Optional[PixelClassifier] = None
self._classifier: PixelClassifier | None = None

if average_over is not None and average_over > 0:
self.conv_filter = disk(average_over) / np.sum(disk(average_over))
disk = cv2_disk(average_over)
self.conv_filter = disk / np.sum(disk)

if dilation_size is not None and dilation_size > 0:
self.dilation_filter = disk(dilation_size)
self.dilation_filter = cv2_disk(dilation_size)

@property
def classifier(self) -> PixelClassifier:
Expand Down Expand Up @@ -118,11 +120,10 @@ def get_cloud_masks(self, data: np.ndarray, **kwargs: Any) -> np.ndarray:
"""
self._check_data_dimension(data, 4)
cloud_probs = self.get_cloud_probability_maps(data, **kwargs)
cloud_masks = self.get_mask_from_prob(cloud_probs)

return cloud_masks
return self.get_mask_from_prob(cloud_probs)

def get_mask_from_prob(self, cloud_probs: np.ndarray, threshold: Optional[float] = None) -> np.ndarray:
def get_mask_from_prob(self, cloud_probs: np.ndarray, threshold: float | None = None) -> np.ndarray:
"""
Returns cloud mask by applying convolution and dilation to cloud probabilities.
Expand All @@ -135,14 +136,18 @@ def get_mask_from_prob(self, cloud_probs: np.ndarray, threshold: Optional[float]

if self.average_over:
cloud_masks = np.asarray(
[convolve(cloud_prob, self.conv_filter) > threshold for cloud_prob in cloud_probs], dtype=np.uint8
[
cv2.filter2D(cloud_prob, -1, self.conv_filter, borderType=cv2.BORDER_REFLECT) > threshold
for cloud_prob in cloud_probs
],
dtype=np.uint8,
)
else:
cloud_masks = (cloud_probs > threshold).astype(np.int8)

if self.dilation_size:
cloud_masks = np.asarray(
[dilation(cloud_mask, self.dilation_filter) for cloud_mask in cloud_masks], dtype=np.uint8
[cv2.dilate(cloud_mask, self.dilation_filter) for cloud_mask in cloud_masks], dtype=np.uint8
)

return cloud_masks
Loading

0 comments on commit 39de435

Please sign in to comment.