Upgrade python minimum to 3.8 and tidy up packaging #604

Merged: 9 commits, Mar 30, 2022

Changes from all commits
.ci_helpers/docker/setup-services.py (12 changes: 3 additions & 9 deletions)

@@ -17,9 +17,7 @@
 def parse_args():
     parser = argparse.ArgumentParser(description="Setup services for testing")
-    parser.add_argument(
-        "--deploy", action="store_true", help="Flag to setup docker services"
-    )
+    parser.add_argument("--deploy", action="store_true", help="Flag to setup docker services")
     parser.add_argument(
         "--tear-down",
         action="store_true",
@@ -36,9 +34,7 @@ def parse_args():
         sys.exit(1)

     if not any([args.deploy, args.tear_down]):
-        print(
-            "Please provide either --deploy or --tear-down flags. For more help use --help flag."
-        )
+        print("Please provide either --deploy or --tear-down flags. For more help use --help flag.")
         sys.exit(0)

     if args.deploy:
@@ -48,9 +44,7 @@
         os.system(f"docker-compose -f {COMPOSE_FILE} pull")

         print("3) Bringing up services.")
-        os.system(
-            f"docker-compose -f {COMPOSE_FILE} up -d --remove-orphans --force-recreate"
-        )
+        os.system(f"docker-compose -f {COMPOSE_FILE} up -d --remove-orphans --force-recreate")

         print(f"4) Deleting old test folder at {TEST_DATA_PATH}")
         if TEST_DATA_PATH.exists():
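
All three hunks in this file are formatting-only: calls previously wrapped across lines are collapsed onto single lines that fit the new 100-character limit configured in the .flake8 file added below. For orientation, a minimal runnable sketch of the script's flag handling; the --tear-down help text and the docker-compose steps are collapsed in this diff view, so they are elided or paraphrased here:

import argparse
import sys


def parse_args():
    parser = argparse.ArgumentParser(description="Setup services for testing")
    parser.add_argument("--deploy", action="store_true", help="Flag to setup docker services")
    # The --tear-down help string is collapsed in the diff view, so a
    # placeholder is used here.
    parser.add_argument("--tear-down", action="store_true", help="Tear down docker services")
    return parser.parse_args()


if __name__ == "__main__":
    args = parse_args()
    if args.deploy and args.tear_down:
        sys.exit(1)  # the two flags are mutually exclusive
    if not any([args.deploy, args.tear_down]):
        print("Please provide either --deploy or --tear-down flags. For more help use --help flag.")
        sys.exit(0)

In CI this script is presumably invoked as python .ci_helpers/docker/setup-services.py --deploy before the tests and with --tear-down afterwards.
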
.ci_helpers/py3.10.yaml (6 changes: 2 additions & 4 deletions)

@@ -4,16 +4,14 @@ channels:
dependencies:
  - python=3.10
  - dask
  - matplotlib
  - netCDF4
  - numpy
  - pynmea2
  - pytz
  - scipy
  - xarray
  - zarr
  - fsspec
  - requests
  - aiohttp
  - s3fs
  - matplotlib-base
  - cmocean
  - mamba=0.20.0
.ci_helpers/py3.7.yaml (19 changes: 0 additions & 19 deletions)

This file was deleted.
.ci_helpers/py3.8.yaml (6 changes: 2 additions & 4 deletions)

@@ -4,16 +4,14 @@ channels:
dependencies:
  - python=3.8
  - dask
  - matplotlib
  - netCDF4
  - numpy
  - pynmea2
  - pytz
  - scipy
  - xarray
  - zarr
  - fsspec
  - requests
  - aiohttp
  - s3fs
  - matplotlib-base
  - cmocean
  - mamba=0.20.0
.ci_helpers/py3.9.yaml (6 changes: 2 additions & 4 deletions)

@@ -4,16 +4,14 @@ channels:
dependencies:
  - python=3.9
  - dask
  - matplotlib
  - netCDF4
  - numpy
  - pynmea2
  - pytz
  - scipy
  - xarray
  - zarr
  - fsspec
  - requests
  - aiohttp
  - s3fs
  - matplotlib-base
  - cmocean
  - mamba=0.20.0
.ci_helpers/run-test.py (4 changes: 1 addition & 3 deletions)

@@ -45,9 +45,7 @@
         default="",
         help="Comma separated list of changed files.",
     )
-    parser.add_argument(
-        "--pytest-args", type=str, help="Optional pytest args", default=""
-    )
+    parser.add_argument("--pytest-args", type=str, help="Optional pytest args", default="")
     parser.add_argument(
         "--local",
         action="store_true",
.flake8 (5 changes: 5 additions & 0 deletions)

@@ -0,0 +1,5 @@
+[flake8]
+max-line-length = 100
+output-file = flake8_log.txt
+tee = True
+ignore = E722,E203,W503,T001
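
For context, the ignored codes are standard checks: E722 (bare except), E203 (whitespace before ':', which black inserts in some slices), W503 (line break before a binary operator, black's preferred wrap style), and T001 (print found, from the flake8-print plugin). Illustrations, not taken from the echopype codebase, of what each suppressed check would normally flag:

x = [1, 2, 3, 4]

print(x)  # T001 (flake8-print): print call found

y = x[1 : 3]  # E203: whitespace before ':'

total = (x[0]
         + x[1])  # W503: line break before binary operator

try:
    result = 1 / 0
except:  # E722: bare except
    result = None
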
.github/workflows/ci.yaml (6 changes: 2 additions & 4 deletions)

@@ -21,12 +21,10 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - python-version: 3.7
-            experimental: false
           - python-version: 3.8
             experimental: false
           - python-version: 3.9
-            experimental: true
+            experimental: false
           - python-version: "3.10"
             experimental: true
     services:
@@ -91,7 +89,7 @@ jobs:
       - name: Install echopype
         shell: bash -l {0}
         run: |
-          python -m pip install -e .
+          python -m pip install -e .[plot]
       - name: Running All Tests
         shell: bash -l {0}
         run: |
.github/workflows/pr.yaml (6 changes: 2 additions & 4 deletions)

@@ -17,12 +17,10 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - python-version: 3.7
-            experimental: false
           - python-version: 3.8
             experimental: false
           - python-version: 3.9
-            experimental: true
+            experimental: false
           - python-version: "3.10"
             experimental: true
     services:
@@ -87,7 +85,7 @@ jobs:
      - name: Install echopype
        shell: bash -l {0}
        run: |
-          python -m pip install -e .
+          python -m pip install -e .[plot]
      - name: Finding changed files
        id: files
        uses: lsetiawan/get-changed-files@pr_target
.readthedocs.yml (3 changes: 1 addition & 2 deletions)

@@ -18,9 +18,8 @@ formats: []

 # Optionally set the version of Python and requirements required to build your docs
 python:
-  version: 3.7
+  version: "3.8"
   install:
-    - requirements: requirements.txt
     - requirements: docs/requirements.txt
     - method: pip
       path: .
docs/source/contributing.rst (6 changes: 4 additions & 2 deletions)

@@ -76,12 +76,14 @@ Create a `conda <https://docs.conda.io>`_ environment for echopype development

 .. code-block:: bash

-    conda create -c conda-forge -n echopype --yes python=3.9 --file requirements.txt --file requirements-dev.txt
+    conda create -c conda-forge -n echopype --yes python=3.9 --file requirements-dev.txt
     conda activate echopype
     # ipykernel is recommended, in order to use with JupyterLab and IPython
     # to aid with development. We recommend you install JupyterLab separately
     conda install -c conda-forge ipykernel
-    pip install -e .
+    # plot is an extra set of requirements that can be used for plotting.
+    # the command will install all the dependencies along with plotting dependencies.
+    pip install -e .[plot]

 See the :doc:`installation` page to simply install the latest echopype release from conda or PyPI.
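
The new [plot] extra pulls in the optional plotting dependencies, which appear to be matplotlib and cmocean based on the conda environment changes above (the packaging file declaring the extra is not among the loaded diffs). Note that in zsh the extras spec needs quoting: pip install -e ".[plot]". A small sanity check that the extra installed, under that assumption about the package names:

# Verify the optional plotting dependencies import cleanly after
# `pip install -e .[plot]`; package names are an assumption based on the
# conda environment changes in this PR.
import importlib

for mod in ("matplotlib", "cmocean"):
    try:
        importlib.import_module(mod)
        print(f"{mod}: OK")
    except ImportError:
        print(f"{mod}: missing; re-run `pip install -e .[plot]`")
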
echopype/calibrate/api.py (8 changes: 2 additions & 6 deletions)

@@ -20,9 +20,7 @@ def _compute_cal(
     # Check on waveform_mode and encode_mode inputs
     if echodata.sonar_model == "EK80":
         if waveform_mode is None or encode_mode is None:
-            raise ValueError(
-                "waveform_mode and encode_mode must be specified for EK80 calibration"
-            )
+            raise ValueError("waveform_mode and encode_mode must be specified for EK80 calibration")
         elif waveform_mode not in ("BB", "CW"):
             raise ValueError("Input waveform_mode not recognized!")
         elif encode_mode not in ("complex", "power"):
@@ -54,9 +52,7 @@ def _compute_cal(
     # Perform calibration
     if cal_type == "Sv":

-        sv_dataset = cal_obj.compute_Sv(
-            waveform_mode=waveform_mode, encode_mode=encode_mode
-        )
+        sv_dataset = cal_obj.compute_Sv(waveform_mode=waveform_mode, encode_mode=encode_mode)

         if "water_level" in echodata.platform.data_vars.keys():
             # add water_level to the created xr.Dataset
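
Per the check above, EK80 data must be calibrated with both modes given: waveform_mode selects "BB" or "CW" and encode_mode selects "complex" or "power". A hedged usage sketch; the raw file name is hypothetical, while the API calls mirror this diff and the docstring example in calibrate_base.py below:

import echopype as ep

# Hypothetical raw file; sonar_model selects the EK80 parser.
ed = ep.open_raw("survey_ek80.raw", sonar_model="EK80")

# For EK80, both arguments are required, as enforced by _compute_cal above.
ds_Sv = ep.calibrate.compute_Sv(ed, waveform_mode="BB", encode_mode="complex")
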
echopype/calibrate/calibrate_azfp.py (16 changes: 4 additions & 12 deletions)

@@ -31,9 +31,7 @@ def get_cal_params(self, cal_params):
         # Params from the Beam group
         for p in ["EL", "DS", "TVR", "VTX", "Sv_offset", "equivalent_beam_angle"]:
             # substitute if None in user input
-            self.cal_params[p] = (
-                cal_params[p] if p in cal_params else self.echodata.beam[p]
-            )
+            self.cal_params[p] = cal_params[p] if p in cal_params else self.echodata.beam[p]

     def get_env_params(self):
         """Get env params using user inputs or values from data file.
@@ -51,9 +49,7 @@ def get_env_params(self):

         # Salinity and pressure always come from user input
         if ("salinity" not in self.env_params) or ("pressure" not in self.env_params):
-            raise ReferenceError(
-                "Please supply both salinity and pressure in env_params."
-            )
+            raise ReferenceError("Please supply both salinity and pressure in env_params.")
         else:
             self.env_params["salinity"] = self.env_params["salinity"]
             self.env_params["pressure"] = self.env_params["pressure"]
@@ -84,9 +80,7 @@ def compute_range_meter(self, cal_type):
         'Sv' for calculating volume backscattering strength, or
         'Sp' for calculating point backscattering strength
         """
-        self.range_meter = self.echodata.compute_range(
-            self.env_params, azfp_cal_type=cal_type
-        )
+        self.range_meter = self.echodata.compute_range(self.env_params, azfp_cal_type=cal_type)

     def _cal_power(self, cal_type, **kwargs):
         """Calibrate to get volume backscattering strength (Sv) from AZFP power data.
@@ -112,9 +106,7 @@ def _cal_power(self, cal_type, **kwargs):
         # scaling factor (slope) in Fig.G-1, units Volts/dB], see p.84
         a = self.cal_params["DS"]
         EL = (
-            self.cal_params["EL"]
-            - 2.5 / a
-            + self.echodata.beam.backscatter_r / (26214 * a)
+            self.cal_params["EL"] - 2.5 / a + self.echodata.beam.backscatter_r / (26214 * a)
         )  # eq.(5)

         if cal_type == "Sv":
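
The last hunk reformats eq. (5), which converts AZFP 16-bit backscatter counts N to received echo level: EL = EL_cal - 2.5/a + N/(26214 * a), where a is the detector slope DS (Volts/dB, per the comment above) and 26214 is roughly 65536/2.5, mapping counts onto the detector's 2.5 V full-scale range. A numeric sketch with made-up parameter values:

import numpy as np

# Made-up AZFP parameters, for illustration only; in _cal_power these come
# from cal_params ("EL", "DS") and echodata.beam.backscatter_r.
EL_max = 142.8  # EL: echo level at full-scale counts [dB]
a = 0.0239      # DS: detector slope [Volts/dB]
counts = np.array([12000, 30000, 65535])  # raw 16-bit backscatter counts

# eq. (5): counts are scaled to volts via 2.5/65536, then to dB via the slope.
EL = EL_max - 2.5 / a + counts / (26214 * a)
print(EL.round(1))  # full-scale counts recover approximately EL_max
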
echopype/calibrate/calibrate_base.py (37 changes: 9 additions & 28 deletions)

@@ -83,9 +83,7 @@ def __init__(
         >>> echopype.calibrate.compute_Sv(echodata, env_params=env_params)
         """  # noqa
         if interp_method not in VALID_INTERP_METHODS[data_kind]:
-            raise ValueError(
-                f"invalid interp_method {interp_method} for data_kind {data_kind}"
-            )
+            raise ValueError(f"invalid interp_method {interp_method} for data_kind {data_kind}")

         self.env_params = env_params
         self.data_kind = data_kind
@@ -112,13 +110,9 @@ def _apply(self, echodata) -> Dict[str, xr.DataArray]:

         if self.data_kind == "mobile":
             if np.isnan(echodata.platform["location_time"]).all():
-                raise ValueError(
-                    "cannot perform mobile interpolation without location_time"
-                )
+                raise ValueError("cannot perform mobile interpolation without location_time")
             # compute_range needs indexing by ping_time
-            interp_plat = echodata.platform.interp(
-                {"location_time": echodata.beam["ping_time"]}
-            )
+            interp_plat = echodata.platform.interp({"location_time": echodata.beam["ping_time"]})

             result = {}
             for var, values in env_params.data_vars.items():
@@ -132,18 +126,15 @@ def _apply(self, echodata) -> Dict[str, xr.DataArray]:
                         interp_plat["longitude"].data,
                     )
                 )
-                interp = scipy.interpolate.griddata(
-                    points, values, xi, method=self.interp_method
-                )
+                interp = scipy.interpolate.griddata(points, values, xi, method=self.interp_method)
                 result[var] = ("ping_time", interp)
             env_params = xr.Dataset(
                 data_vars=result, coords={"ping_time": interp_plat["ping_time"]}
             )
         else:
             # TODO: organized case
             min_max = {
-                dim: {"min": env_params[dim].min(), "max": env_params[dim].max()}
-                for dim in dims
+                dim: {"min": env_params[dim].min(), "max": env_params[dim].max()} for dim in dims
             }

             extrap = env_params.interp(
@@ -153,25 +144,18 @@ def _apply(self, echodata) -> Dict[str, xr.DataArray]:
                 kwargs={"fill_value": "extrapolate" if len(dims) == 1 else None},
             )
             # only keep unique indexes; xarray requires that indexes be unique
-            extrap_unique_idx = {
-                dim: np.unique(extrap[dim], return_index=True)[1] for dim in dims
-            }
+            extrap_unique_idx = {dim: np.unique(extrap[dim], return_index=True)[1] for dim in dims}
             extrap = extrap.isel(**extrap_unique_idx)
             interp = env_params.interp(
                 {dim: echodata.platform[dim].data for dim in dims},
                 method=self.interp_method,
             )
-            interp_unique_idx = {
-                dim: np.unique(interp[dim], return_index=True)[1] for dim in dims
-            }
+            interp_unique_idx = {dim: np.unique(interp[dim], return_index=True)[1] for dim in dims}
             interp = interp.isel(**interp_unique_idx)

             if self.extrap_method is not None:
                 less = extrap.sel(
-                    {
-                        dim: extrap[dim][extrap[dim] < min_max[dim]["min"]]
-                        for dim in dims
-                    }
+                    {dim: extrap[dim][extrap[dim] < min_max[dim]["min"]] for dim in dims}
                 )
                 middle = interp.sel(
                     {
@@ -185,10 +169,7 @@ def _apply(self, echodata) -> Dict[str, xr.DataArray]:
                     }
                 )
                 greater = extrap.sel(
-                    {
-                        dim: extrap[dim][extrap[dim] > min_max[dim]["max"]]
-                        for dim in dims
-                    }
+                    {dim: extrap[dim][extrap[dim] > min_max[dim]["max"]] for dim in dims}
                 )

                 # remove empty datasets (xarray does not allow any dims from any datasets
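
Two idioms reformatted above are worth unpacking: scipy.interpolate.griddata interpolates scattered (latitude, longitude) environmental samples onto the platform track in the "mobile" branch, and np.unique(..., return_index=True) drops duplicate coordinate values in the "organized" branch, since xarray requires unique indexes for .interp and .sel. A standalone sketch of both, using synthetic data rather than echopype objects:

import numpy as np
import scipy.interpolate

rng = np.random.default_rng(0)

# Scattered environmental samples at known (lat, lon) positions: synthetic
# stand-ins for the env_params values interpolated in the "mobile" branch.
points = rng.random((50, 2))           # sample positions
values = 1480.0 + 20.0 * points[:, 0]  # e.g. sound speed in m/s
xi = rng.random((10, 2))               # platform track positions to fill

interp = scipy.interpolate.griddata(points, values, xi, method="linear")

# Deduplicating a coordinate as in the "organized" branch: element [1] of
# np.unique's return holds the positions of first occurrences, which an
# .isel() call can then select.
coord = np.array([0.0, 1.0, 1.0, 2.0, 3.0, 3.0])
unique_idx = np.unique(coord, return_index=True)[1]
print(coord[unique_idx])  # [0. 1. 2. 3.]
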