From a77b685c945ce16e9b39d06dda1797960b219104 Mon Sep 17 00:00:00 2001
From: Matt Fisher
Date: Sun, 11 Aug 2024 18:07:22 -0600
Subject: [PATCH] Autofix flake8 ignores E711, E712, E714, F401, F841 with Ruff (#542)

---
 .flake8                              | 22 ++++-----
 .pre-commit-config.yaml              | 13 ++---
 icepyx/core/auth.py                  |  1 -
 icepyx/core/granules.py              |  2 +-
 icepyx/core/is2ref.py                |  2 +-
 icepyx/core/query.py                 | 13 ++---
 icepyx/core/read.py                  |  2 +-
 icepyx/core/spatial.py               | 72 ++++++++++++++--------------
 icepyx/core/validate_inputs.py       |  3 --
 icepyx/core/variables.py             | 52 ++++++++++----------
 icepyx/core/visualization.py         |  2 +-
 icepyx/quest/dataset_scripts/argo.py | 14 +++---
 icepyx/quest/quest.py                |  2 -
 icepyx/tests/is2class_query.py       |  2 -
 icepyx/tests/test_Earthdata.py       |  1 -
 icepyx/tests/test_auth.py            |  1 -
 icepyx/tests/test_quest.py           |  1 -
 icepyx/tests/test_quest_argo.py      |  2 +-
 icepyx/tests/test_spatial.py         | 41 +++++++---------
 icepyx/tests/test_temporal.py        | 19 ++++----
 icepyx/tests/test_visualization.py   |  1 -
 21 files changed, 122 insertions(+), 146 deletions(-)

diff --git a/.flake8 b/.flake8
index 97a126574..bacc40964 100644
--- a/.flake8
+++ b/.flake8
@@ -8,32 +8,28 @@ per-file-ignores =
     test_granules.py:E501
     # imported but unused
     __init__.py:F401
-    # import not at top of file
-    doc/source/conf.py:E402
+    # import not at top of file, imported but unused
+    doc/source/conf.py:E402,F401
 
-# GOAL: remove these ignores
 ignore =
     # line too long
+    # NOTE: This is a formatting concern. Black handles long lines of code, but
+    # allows inline comments to be infinitely long (automatically formatting
+    # them can have unintended consequences). In our codebase, we have a lot of
+    # overlong comments.
+    # See: https://github.com/psf/black/issues/1713#issuecomment-1357045092
     E501
-    # comparison syntax
-    E711
-    # comparison syntax
-    E712
-    # comparison syntax in tests
-    E714
+    # GOAL: remove ignores below this line
     # comparison syntax in tests
     E721
     # bare except
     E722
     # ambiguous var name
     E741
-    # imported but unused
-    F401
     # unable to detect undefined names
     F403
-    # assigned and unused (in tests)
-    F841
     # line break before binary operator
+    # NOTE: This is a formatting concern
     W503
 
 # GOAL:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3b0674938..eb801b50d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,13 +1,14 @@
 repos:
-- repo: https://github.com/psf/black
-  rev: 24.8.0
-  hooks:
+- repo: https://github.com/psf/black
+  rev: 24.8.0
+  hooks:
   - id: black
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.6.0 # Use the ref you want to point at
-  hooks:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.6.0 # Use the ref you want to point at
+  hooks:
   - id: check-toml
+  - id: check-yaml
   - id: check-added-large-files
     args: ["--maxkb=5000"]
   - id: end-of-file-fixer
diff --git a/icepyx/core/auth.py b/icepyx/core/auth.py
index ba07ac398..55e34fd7f 100644
--- a/icepyx/core/auth.py
+++ b/icepyx/core/auth.py
@@ -1,6 +1,5 @@
 import copy
 import datetime
-import warnings
 
 import earthaccess
 from icepyx.core.exceptions import DeprecationError
diff --git a/icepyx/core/granules.py b/icepyx/core/granules.py
index 5c298c625..205149f56 100644
--- a/icepyx/core/granules.py
+++ b/icepyx/core/granules.py
@@ -351,7 +351,7 @@ def place_order(
         # DevGoal: use the request response/number to do some error handling/
         # give the user better messaging for failures
         # print(request.content)
-        root = ET.fromstring(request.content)
+        # root = ET.fromstring(request.content)
         # print([subset_agent.attrib for subset_agent in root.iter('SubsetAgent')])
 
         if verbose is True:
diff --git a/icepyx/core/is2ref.py b/icepyx/core/is2ref.py
index 86888547b..38561168a 100644
--- a/icepyx/core/is2ref.py
+++ b/icepyx/core/is2ref.py
@@ -159,7 +159,7 @@ def get_varlist(elem):
         get_varlist(root)
 
         vars_vals = [
-            v.replace(":", "/") if v.startswith("/") == False else v.replace("/:", "")
+            v.replace(":", "/") if v.startswith("/") is False else v.replace("/:", "")
             for v in vars_raw
         ]
         cust_options.update({"variables": vars_vals})
diff --git a/icepyx/core/query.py b/icepyx/core/query.py
index dce3c1c34..a57806501 100644
--- a/icepyx/core/query.py
+++ b/icepyx/core/query.py
@@ -1,6 +1,5 @@
 import geopandas as gpd
 import matplotlib.pyplot as plt
-from pathlib import Path  # used in docstring tests
 import pprint
 
 import icepyx.core.APIformatting as apifmt
@@ -103,9 +102,10 @@ class GenQuery:
 
     Initializing Query with a geospatial polygon file.
 
-    >>> aoi = str(Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve())
+    >>> from pathlib import Path
+    >>> aoi = Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve()
     >>> reg_a_dates = ['2019-02-22','2019-02-28']
-    >>> reg_a = GenQuery(aoi, reg_a_dates)
+    >>> reg_a = GenQuery(str(aoi), reg_a_dates)
     >>> print(reg_a)
     Extent type: polygon
     Coordinates: [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]
@@ -378,9 +378,10 @@ class Query(GenQuery, EarthdataAuthMixin):
 
     Initializing Query with a geospatial polygon file.
 
-    >>> aoi = str(Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve())
+    >>> from pathlib import Path
+    >>> aoi = Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve()
    >>> reg_a_dates = ['2019-02-22','2019-02-28']
-    >>> reg_a = Query('ATL06', aoi, reg_a_dates)
+    >>> reg_a = Query('ATL06', str(aoi), reg_a_dates)
     >>> print(reg_a)
     Product ATL06 v006
     ('polygon', [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0])
@@ -1132,7 +1133,7 @@ def visualize_spatial_extent(
         gdf = self._spatial.extent_as_gdf
 
         try:
-            from shapely.geometry import Polygon
+            from shapely.geometry import Polygon  # noqa: F401
             import geoviews as gv
 
             gv.extension("bokeh")
diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index 2e4e03ade..e6bfc40b7 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -5,7 +5,6 @@
 
 import earthaccess
 import numpy as np
-from s3fs.core import S3File
 import xarray as xr
 
 from icepyx.core.auth import EarthdataAuthMixin
@@ -634,6 +633,7 @@ def load(self):
             )  # wanted_groups, vgrp.keys()))
 
             # Closing the file prevents further operations on the dataset
+            # from s3fs.core import S3File
             # if isinstance(file, S3File):
             #     file.close()
 
diff --git a/icepyx/core/spatial.py b/icepyx/core/spatial.py
index 721c949ab..d810726b3 100644
--- a/icepyx/core/spatial.py
+++ b/icepyx/core/spatial.py
@@ -1,12 +1,10 @@
 import geopandas as gpd
 import numpy as np
 import os
-from pathlib import Path
 from shapely.geometry import box, Polygon
 from shapely.geometry.polygon import orient
 import warnings
 
-import icepyx.core.APIformatting as apifmt
 
 
 # DevGoal: need to update the spatial_extent docstring to describe coordinate order for input
@@ -62,7 +60,7 @@ def geodataframe(extent_type, spatial_extent, file=False, xdateline=None):
     # print("this should cross the dateline:" + str(xdateline))
 
     if extent_type == "bounding_box":
-        if xdateline == True:
+        if xdateline is True:
             cartesian_lons = [i if i > 0 else i + 360 for i in spatial_extent[0:-1:2]]
             cartesian_spatial_extent = [
                 item
@@ -79,14 +77,14 @@ def geodataframe(extent_type, spatial_extent, file=False, xdateline=None):
 
     # DevGoal: Currently this if/else within this elif are not tested...
     # DevGoal: the crs setting and management needs to be improved
-    elif extent_type == "polygon" and file == False:
+    elif extent_type == "polygon" and file is False:
         # if spatial_extent is already a Polygon
         if isinstance(spatial_extent, Polygon):
             spatial_extent_geom = spatial_extent
 
         # else, spatial_extent must be a list of floats (or list of tuples of floats)
         else:
-            if xdateline == True:
+            if xdateline is True:
                 cartesian_lons = [
                     i if i > 0 else i + 360 for i in spatial_extent[0:-1:2]
                 ]
@@ -109,7 +107,7 @@ def geodataframe(extent_type, spatial_extent, file=False, xdateline=None):
 
     # If extent_type is a polygon AND from a file, create a geopandas geodataframe from it
    # DevGoal: Currently this elif isn't tested...
-    elif extent_type == "polygon" and file == True:
+    elif extent_type == "polygon" and file is True:
         gdf = gpd.read_file(spatial_extent)
 
     else:
@@ -397,37 +395,38 @@ def __init__(self, spatial_extent, **kwarg):
             Optional keyword argument to let user specify whether the spatial input crosses the dateline or not.
 
-        See Also
-        --------
-        icepyx.Query
-
-
-        Examples
-        --------
-        Initializing Spatial with a bounding box.
-
-        >>> reg_a_bbox = [-55, 68, -48, 71]
-        >>> reg_a = Spatial(reg_a_bbox)
-        >>> print(reg_a)
-        Extent type: bounding_box
-        Coordinates: [-55.0, 68.0, -48.0, 71.0]
-
-        Initializing Query with a list of polygon vertex coordinate pairs.
-
-        >>> reg_a_poly = [(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)]
-        >>> reg_a = Spatial(reg_a_poly)
-        >>> print(reg_a)
-        Extent type: polygon
-        Coordinates: [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]
+        See Also
+        --------
+        icepyx.Query
 
-        Initializing Query with a geospatial polygon file.
-
-        >>> aoi = str(Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve())
-        >>> reg_a = Spatial(aoi)
-        >>> print(reg_a) # doctest: +SKIP
-        Extent Type: polygon
-        Source file: ./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg
-        Coordinates: [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]
+        Examples
+        --------
+        Initializing Spatial with a bounding box.
+
+        >>> reg_a_bbox = [-55, 68, -48, 71]
+        >>> reg_a = Spatial(reg_a_bbox)
+        >>> print(reg_a)
+        Extent type: bounding_box
+        Coordinates: [-55.0, 68.0, -48.0, 71.0]
+
+        Initializing Query with a list of polygon vertex coordinate pairs.
+
+        >>> reg_a_poly = [(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)]
+        >>> reg_a = Spatial(reg_a_poly)
+        >>> print(reg_a)
+        Extent type: polygon
+        Coordinates: [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]
+
+        Initializing Query with a geospatial polygon file.
+
+        >>> from pathlib import Path
+        >>> aoi = Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve()
+        >>> reg_a = Spatial(str(aoi))
+        >>> print(reg_a) # doctest: +SKIP
+        Extent Type: polygon
+        Source file: ./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg
+        Coordinates: [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]
         """
 
         scalar_types = (int, float, np.int64)
@@ -590,6 +589,7 @@ def extent_file(self):
 
         >>> reg_a.extent_file
 
+        >>> from pathlib import Path
         >>> reg_a = Spatial(str(Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve()))
         >>> reg_a.extent_file # doctest: +SKIP
         ./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg
@@ -643,7 +643,7 @@ def fmt_for_CMR(self):
             extent = [float(i) for i in polygon]
 
             # TODO: explore how this will be impacted if the polygon is read in from a shapefile and crosses the dateline
-            if hasattr(self, "_xdateln") and self._xdateln == True:
+            if hasattr(self, "_xdateln") and self._xdateln is True:
                 neg_lons = [i if i < 181.0 else i - 360 for i in extent[0:-1:2]]
                 extent = [item for pair in zip(neg_lons, extent[1::2]) for item in pair]
 
diff --git a/icepyx/core/validate_inputs.py b/icepyx/core/validate_inputs.py
index a69f045fb..e70a8f944 100644
--- a/icepyx/core/validate_inputs.py
+++ b/icepyx/core/validate_inputs.py
@@ -1,10 +1,7 @@
 import datetime as dt
-import os
 import warnings
 
 import numpy as np
 
-import icepyx.core.APIformatting as apifmt
-
 
 def prod_version(latest_vers, version):
     """
diff --git a/icepyx/core/variables.py b/icepyx/core/variables.py
index 15d5268e5..87c2a94e8 100644
--- a/icepyx/core/variables.py
+++ b/icepyx/core/variables.py
@@ -1,12 +1,10 @@
 import numpy as np
 import os
-import pprint
 
 from icepyx.core.auth import EarthdataAuthMixin
 import icepyx.core.is2ref as is2ref
 from icepyx.core.exceptions import DeprecationError
 import icepyx.core.validate_inputs as val
-import icepyx.core as ipxc
 
 
 # DEVGOAL: use h5py to simplify some of these tasks, if possible!
@@ -145,7 +143,7 @@ def avail(self, options=False, internal=False):
         'quality_assessment/gt3r/signal_selection_source_fraction_3']
         """
 
-        if not hasattr(self, "_avail") or self._avail == None:
+        if not hasattr(self, "_avail") or self._avail is None:
             if not hasattr(self, "path") or self.path.startswith("s3"):
                 self._avail = is2ref._get_custom_options(
                     self.session, self.product, self.version
@@ -167,15 +165,15 @@ def visitor_func(name, node):
             with h5py.File(self.path, "r") as h5f:
                 h5f.visititems(visitor_func)
 
-        if options == True:
+        if options is True:
             vgrp, paths = self.parse_var_list(self._avail)
             allpaths = []
             [allpaths.extend(np.unique(np.array(paths[p]))) for p in range(len(paths))]
             allpaths = np.unique(allpaths)
-            if internal == False:
+            if internal is False:
                 print("var_list inputs: " + ", ".join(vgrp.keys()))
                 print("keyword_list and beam_list inputs: " + ", ".join(allpaths))
-            elif internal == True:
+            elif internal is True:
                 return vgrp, allpaths
         else:
             return self._avail
@@ -259,12 +257,12 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False):
 
         # create a dictionary of variable names and paths
         vgrp = {}
-        if tiered == False:
+        if tiered is False:
             paths = []
         else:
             num = np.max([v.count("/") for v in varlist])
             # print('max needed: ' + str(num))
-            if tiered_vars == True:
+            if tiered_vars is True:
                 paths = [[] for i in range(num + 1)]
             else:
                 paths = [[] for i in range(num)]
@@ -279,7 +277,7 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False):
                 vgrp[vkey].append(vn)
 
             if vpath:
-                if tiered == False:
+                if tiered is False:
                     paths.append(vpath)
                 else:
                     j = 0
@@ -289,7 +287,7 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False):
                     for i in range(j, num):
                         paths[i].append("none")
                         i = i + 1
-                    if tiered_vars == True:
+                    if tiered_vars is True:
                         paths[num].append(vkey)
 
         return vgrp, paths
@@ -363,7 +361,7 @@ def _get_sum_varlist(self, var_list, all_vars, defaults):
         Get the list of variables to add or iterate through, depending on function inputs.
         """
         sum_varlist = []
-        if defaults == True:
+        if defaults is True:
             sum_varlist = sum_varlist + is2ref._default_varlists(self.product)
         if var_list is not None:
             for vn in var_list:
@@ -380,9 +378,9 @@ def _get_combined_list(beam_list, keyword_list):
         Get the combined list of beams and/or keywords to add or iterate through.
         """
         combined_list = []
-        if beam_list == None:
+        if beam_list is None:
             combined_list = keyword_list
-        elif keyword_list == None:
+        elif keyword_list is None:
             combined_list = beam_list
         else:
             combined_list = keyword_list + beam_list
@@ -485,10 +483,10 @@ def append(self, defaults=False, var_list=None, beam_list=None, keyword_list=Non
         """
 
         assert not (
-            defaults == False
-            and var_list == None
-            and beam_list == None
-            and keyword_list == None
+            defaults is False
+            and var_list is None
+            and beam_list is None
+            and keyword_list is None
         ), "You must enter parameters to add to a variable subset list. If you do not want to subset by variable, ensure your is2.subsetparams dictionary does not contain the key 'Coverage'."
 
         final_vars = {}
@@ -497,7 +495,7 @@ def append(self, defaults=False, var_list=None, beam_list=None, keyword_list=Non
         self._check_valid_lists(vgrp, allpaths, var_list, beam_list, keyword_list)
 
         # Instantiate self.wanted to an empty dictionary if it doesn't exist
-        if not hasattr(self, "wanted") or self.wanted == None:
+        if not hasattr(self, "wanted") or self.wanted is None:
             self.wanted = {}
 
         # DEVGOAL: add a secondary var list to include uncertainty/error information for lower level data if specific data variables have been specified...
@@ -506,7 +504,7 @@ def append(self, defaults=False, var_list=None, beam_list=None, keyword_list=Non
         sum_varlist = self._get_sum_varlist(var_list, vgrp.keys(), defaults)
 
         # Case only variables (but not keywords or beams) are specified
-        if beam_list == None and keyword_list == None:
+        if beam_list is None and keyword_list is None:
             final_vars.update(self._iter_vars(sum_varlist, final_vars, vgrp))
 
         # Case a beam and/or keyword list is specified (with or without variables)
@@ -577,16 +575,16 @@ def remove(self, all=False, var_list=None, beam_list=None, keyword_list=None):
         >>> reg_a.order_vars.remove(keyword_list=['ancillary_data']) # doctest: +SKIP
         """
 
-        if not hasattr(self, "wanted") or self.wanted == None:
+        if not hasattr(self, "wanted") or self.wanted is None:
             raise ValueError(
                 "You must construct a wanted variable list in order to remove values from it."
             )
 
         assert not (
-            all == False
-            and var_list == None
-            and beam_list == None
-            and keyword_list == None
+            all is False
+            and var_list is None
+            and beam_list is None
+            and keyword_list is None
         ), "You must specify which variables/paths/beams you would like to remove from your wanted list."
 
         # if not hasattr(self, 'avail'): self.get_avail()
@@ -598,7 +596,7 @@ def remove(self, all=False, var_list=None, beam_list=None, keyword_list=None):
 
         # self._check_valid_lists(vgrp, allpaths, var_list, beam_list, keyword_list)
 
-        if all == True:
+        if all is True:
             try:
                 self.wanted = None
             except NameError:
@@ -606,7 +604,7 @@ def remove(self, all=False, var_list=None, beam_list=None, keyword_list=None):
 
         else:
             # Case only variables (but not keywords or beams) are specified
-            if beam_list == None and keyword_list == None:
+            if beam_list is None and keyword_list is None:
                 for vn in var_list:
                     try:
                         del self.wanted[vn]
@@ -617,7 +615,7 @@ def remove(self, all=False, var_list=None, beam_list=None, keyword_list=None):
             # Case a beam and/or keyword list is specified (with or without variables)
             else:
                 combined_list = self._get_combined_list(beam_list, keyword_list)
-                if var_list == None:
+                if var_list is None:
                     var_list = self.wanted.keys()
 
                 # nec_varlist = ['sc_orient','atlas_sdp_gps_epoch','data_start_utc','data_end_utc',
diff --git a/icepyx/core/visualization.py b/icepyx/core/visualization.py
index edc10d66d..bdbc6d2d9 100644
--- a/icepyx/core/visualization.py
+++ b/icepyx/core/visualization.py
@@ -364,7 +364,7 @@ def request_OA_data(self, paras) -> da.array:
             df_series = df.query(expr="date == @Date").iloc[0]
             beam_data = df_series.beams
 
-        except (NameError, KeyError, IndexError) as error:
+        except (NameError, KeyError, IndexError):
             beam_data = None
 
         if not beam_data:
diff --git a/icepyx/quest/dataset_scripts/argo.py b/icepyx/quest/dataset_scripts/argo.py
index 8c614d301..b7f374bd6 100644
--- a/icepyx/quest/dataset_scripts/argo.py
+++ b/icepyx/quest/dataset_scripts/argo.py
@@ -268,10 +268,10 @@ def search_data(self, params=None, presRange=None, printURL=False) -> str:
         """
 
         # if search is called with replaced parameters or presRange
-        if not params is None:
+        if params is not None:
             self.params = params
 
-        if not presRange is None:
+        if presRange is not None:
             self.presRange = presRange
 
         # builds URL to be submitted
@@ -437,23 +437,23 @@ def download(self, params=None, presRange=None, keep_existing=True) -> pd.DataFr
         """
 
         # TODO: do some basic testing of this block and how the dataframe merging actually behaves
-        if keep_existing == False:
+        if keep_existing is False:
             print(
                 "Your previously stored data in reg.argodata",
                 "will be deleted before new data is downloaded.",
             )
             self.argodata = None
-        elif keep_existing == True and hasattr(self, "argodata"):
+        elif keep_existing is True and hasattr(self, "argodata"):
             print(
                 "The data requested by running this line of code\n",
                 "will be added to previously downloaded data.",
             )
 
         # if download is called with replaced parameters or presRange
-        if not params is None:
+        if params is not None:
             self.params = params
 
-        if not presRange is None:
+        if presRange is not None:
             self.presRange = presRange
 
         # Add qc data for each of the parameters requested
@@ -482,7 +482,7 @@ def download(self, params=None, presRange=None, keep_existing=True) -> pd.DataFr
 
         # now that we have a df from this round of downloads, we can add it to any existing dataframe
         # note that if a given column has previously been added, update needs to be used to replace nans (merge will not replace the nan values)
-        if not self.argodata is None:
+        if self.argodata is not None:
             self.argodata = self.argodata.merge(merged_df, how="outer")
         else:
             self.argodata = merged_df
diff --git a/icepyx/quest/quest.py b/icepyx/quest/quest.py
index 966b19dca..a7cf9be3c 100644
--- a/icepyx/quest/quest.py
+++ b/icepyx/quest/quest.py
@@ -1,5 +1,3 @@
-import matplotlib.pyplot as plt
-
 from icepyx.core.query import GenQuery, Query
 
 from icepyx.quest.dataset_scripts.argo import Argo
diff --git a/icepyx/tests/is2class_query.py b/icepyx/tests/is2class_query.py
index 22a10b223..84c31cfa4 100644
--- a/icepyx/tests/is2class_query.py
+++ b/icepyx/tests/is2class_query.py
@@ -1,6 +1,4 @@
 import icepyx as ipx
-import pytest
-import warnings
 
 
 def test_CMRparams():
diff --git a/icepyx/tests/test_Earthdata.py b/icepyx/tests/test_Earthdata.py
index 60b92f621..81093f8ff 100644
--- a/icepyx/tests/test_Earthdata.py
+++ b/icepyx/tests/test_Earthdata.py
@@ -6,7 +6,6 @@
 import os
 import pytest
 import shutil
-import warnings
 
 
 # PURPOSE: test different authentication methods
diff --git a/icepyx/tests/test_auth.py b/icepyx/tests/test_auth.py
index c8f8e8f5d..50ae1e6ca 100644
--- a/icepyx/tests/test_auth.py
+++ b/icepyx/tests/test_auth.py
@@ -4,7 +4,6 @@
 import earthaccess
 
 from icepyx.core.auth import EarthdataAuthMixin
-from icepyx.core.exceptions import DeprecationError
 
 
 @pytest.fixture()
diff --git a/icepyx/tests/test_quest.py b/icepyx/tests/test_quest.py
index 0ba7325a6..2270bfa8b 100644
--- a/icepyx/tests/test_quest.py
+++ b/icepyx/tests/test_quest.py
@@ -1,5 +1,4 @@
 import pytest
-import re
 
 import icepyx as ipx
 from icepyx.quest.quest import Quest
diff --git a/icepyx/tests/test_quest_argo.py b/icepyx/tests/test_quest_argo.py
index a6940fe7b..fb20a3a47 100644
--- a/icepyx/tests/test_quest_argo.py
+++ b/icepyx/tests/test_quest_argo.py
@@ -59,7 +59,7 @@ def test_param_setter(argo_quest_instance):
 
     reg_a.params = ["temperature", "salinity"]
 
-    exp = list(set(["temperature", "salinity"]))
+    exp = ["temperature", "salinity"]
     assert reg_a.params == exp
 
diff --git a/icepyx/tests/test_spatial.py b/icepyx/tests/test_spatial.py
index 4d6369d9e..dc71cdac9 100644
--- a/icepyx/tests/test_spatial.py
+++ b/icepyx/tests/test_spatial.py
@@ -1,12 +1,9 @@
-import datetime as dt
 import geopandas as gpd
 import numpy as np
-import os
 from pathlib import Path
 import pytest
 import re
 from shapely.geometry import Polygon
-import warnings
 
 import icepyx.core.spatial as spat
 
@@ -65,62 +62,62 @@ def test_intlist_with0_bbox():
 
 def test_too_few_bbox_points():
     with pytest.raises(AssertionError):
-        too_few_bbox_points = spat.Spatial([-64.2, 66.2, -55.5])
+        spat.Spatial([-64.2, 66.2, -55.5])
 
 
 def test_too_many_bbox_points():
     with pytest.raises(AssertionError):
-        too_many_bbox_points = spat.Spatial([-64.2, 66.2, -55.5, 72.5, 0])
+        spat.Spatial([-64.2, 66.2, -55.5, 72.5, 0])
 
 
 def test_invalid_low_latitude_1_bbox():
     with pytest.raises(AssertionError):
-        low_lat_1_bbox = spat.Spatial([-64.2, -90.2, -55.5, 72.5])
+        spat.Spatial([-64.2, -90.2, -55.5, 72.5])
 
 
 def test_invalid_high_latitude_1_bbox():
     with pytest.raises(AssertionError):
-        high_lat_1_bbox = spat.Spatial([-64.2, 90.2, -55.5, 72.5])
+        spat.Spatial([-64.2, 90.2, -55.5, 72.5])
 
 
 def test_invalid_low_latitude_3_bbox():
     with pytest.raises(AssertionError):
-        low_lat_3_bbox = spat.Spatial([-64.2, 66.2, -55.5, -90.5])
+        spat.Spatial([-64.2, 66.2, -55.5, -90.5])
 
 
 def test_invalid_high_latitude_3_bbox():
     with pytest.raises(AssertionError):
-        high_lat_3_bbox = spat.Spatial([-64.2, 66.2, -55.5, 90.5])
+        spat.Spatial([-64.2, 66.2, -55.5, 90.5])
 
 
 def test_invalid_low_longitude_0_bbox():
     with pytest.raises(AssertionError):
-        low_lon_0_bbox = spat.Spatial([-180.2, 66.2, -55.5, 72.5])
+        spat.Spatial([-180.2, 66.2, -55.5, 72.5])
 
 
 def test_invalid_high_longitude_0_bbox():
     with pytest.raises(AssertionError):
-        high_lon_0_bbox = spat.Spatial([180.2, 66.2, -55.5, 72.5])
+        spat.Spatial([180.2, 66.2, -55.5, 72.5])
 
 
 def test_invalid_low_longitude_2_bbox():
     with pytest.raises(AssertionError):
-        low_lon_2_bbox = spat.Spatial([-64.2, 66.2, -180.5, 72.5])
+        spat.Spatial([-64.2, 66.2, -180.5, 72.5])
 
 
 def test_invalid_high_longitude_2_bbox():
     with pytest.raises(AssertionError):
-        high_lon_2_bbox = spat.Spatial([-64.2, 66.2, 180.5, 72.5])
+        spat.Spatial([-64.2, 66.2, 180.5, 72.5])
 
 
 def test_same_sign_lowleft_gt_upright_latitude_bbox():
     with pytest.raises(AssertionError):
-        lat_ll_gt_ur_ss_bbox = spat.Spatial([-64.2, 72.5, -55.5, 66.2])
+        spat.Spatial([-64.2, 72.5, -55.5, 66.2])
 
 
 def test_bad_values_bbox():
     with pytest.raises(ValueError):
-        bad_input = spat.Spatial(["a", "b", "c", "d"])
+        spat.Spatial(["a", "b", "c", "d"])
 
 
 # ############### END BOUNDING BOX TESTS ################################################################
@@ -287,19 +284,17 @@ def test_numpy_intlist_latlon_coords():
 
 def test_odd_num_lat_long_list_poly_throws_error():
     with pytest.raises(AssertionError):
-        bad_input = spat.Spatial([-55, 68, -55, 71, -48, 71, -48, 68, -55])
+        spat.Spatial([-55, 68, -55, 71, -48, 71, -48, 68, -55])
 
 
 def test_wrong_num_lat_long_tuple_poly_throws_error():
     with pytest.raises(ValueError):
-        bad_input = spat.Spatial(
-            [(-55, 68, 69), (-55, 71), (-48, 71), (-48, 68), (-55, 68)]
-        )
+        spat.Spatial([(-55, 68, 69), (-55, 71), (-48, 71), (-48, 68), (-55, 68)])
 
 
 def test_bad_value_types_poly():
     with pytest.raises(ValueError):
-        bad_input = spat.Spatial(["a", "b", "c", "d", "e"])
+        spat.Spatial(["a", "b", "c", "d", "e"])
 
 
 # ###################### Automatically Closed Polygon Tests ###########################################################
@@ -378,12 +373,12 @@ def test_poly_file_simple_one_poly():
 
 def test_bad_poly_inputfile_name_throws_error():
     with pytest.raises(AssertionError):
-        bad_input = spat.Spatial("bad_filename.gpkg")
+        spat.Spatial("bad_filename.gpkg")
 
 
 def test_bad_poly_inputfile_type_throws_error():
     with pytest.raises(TypeError):
-        bad_input = spat.Spatial(str(Path("./icepyx/tests/test_read.py").resolve()))
+        spat.Spatial(str(Path("./icepyx/tests/test_read.py").resolve()))
 
 
 ########## geodataframe ##########
@@ -461,7 +456,7 @@ def test_bbox_not_crosses_dateline(bbox):
 
 def test_poly_wrong_input():
     with pytest.raises(AssertionError):
-        tuplelist = spat.check_dateline(
+        spat.check_dateline(
             "polygon",
             [[160, -45], [160, -40], [-170, -39], [-128, -40], [-128, -45], [160, -45]],
         )
diff --git a/icepyx/tests/test_temporal.py b/icepyx/tests/test_temporal.py
index c93b30a38..cd24deda4 100644
--- a/icepyx/tests/test_temporal.py
+++ b/icepyx/tests/test_temporal.py
@@ -1,8 +1,5 @@
 import datetime as dt
-import numpy as np
 import pytest
-from shapely.geometry import Polygon
-import warnings
 
 import icepyx.core.temporal as tp
 
@@ -239,44 +236,44 @@ def test_range_str_yyyydoy_dict_time_start_end():
 
 # (The following inputs are bad, testing to ensure the temporal class handles this elegantly)
 def test_bad_start_time_type():
     with pytest.raises(AssertionError):
-        bad_start = tp.Temporal(["2016-01-01", "2020-01-01"], 100000, "13:10:01")
+        tp.Temporal(["2016-01-01", "2020-01-01"], 100000, "13:10:01")
 
 
 def test_bad_end_time_type():
     with pytest.raises(AssertionError):
-        bad_end = tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", 131001)
+        tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", 131001)
 
 
 def test_range_bad_list_len():
     with pytest.raises(ValueError):
-        result = tp.Temporal(["2016-01-01", "2020-01-01", "2022-02-15"])
+        tp.Temporal(["2016-01-01", "2020-01-01", "2022-02-15"])
 
 
 def test_range_str_bad_yyyydoy():
     with pytest.raises(AssertionError):
-        bad_end = tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", 131001)
+        tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", 131001)
 
 
 def test_range_str_bad_yyyymmdd():
     with pytest.raises(AssertionError):
-        bad_end = tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", 131001)
+        tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", 131001)
 
 
 # a "bad dict" is assumed to be one of the wrong length or with the wrong key names
 def test_bad_dict_keys():
     with pytest.raises(ValueError):
-        result = tp.Temporal({"startdate": "2016-01-01", "enddate": "2020-01-01"})
+        tp.Temporal({"startdate": "2016-01-01", "enddate": "2020-01-01"})
 
 
 def test_bad_dict_length():
     with pytest.raises(ValueError):
-        result = tp.Temporal({"start_date": "2016-01-01"})
+        tp.Temporal({"start_date": "2016-01-01"})
 
 
 # A "bad range" is a range where the start_date > end date
 def test_range_str_bad_range():
     with pytest.raises(AssertionError):
-        result = tp.Temporal({"start_date": "2020-01-01", "end_date": "2016-01-01"})
+        tp.Temporal({"start_date": "2020-01-01", "end_date": "2016-01-01"})
 
 
 # NOTE: Not testing bad datetime/time inputs because it is assumed the datetime library
diff --git a/icepyx/tests/test_visualization.py b/icepyx/tests/test_visualization.py
index ede046f0b..403cb21f1 100644
--- a/icepyx/tests/test_visualization.py
+++ b/icepyx/tests/test_visualization.py
@@ -1,6 +1,5 @@
 import pytest
 
-from icepyx.core.visualization import Visualize
 import icepyx.core.visualization as vis