Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 36 additions & 7 deletions cmr/queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
from urllib.parse import quote

import requests
import warnings
from dateutil.parser import parse as dateutil_parse

CMR_OPS = "https://cmr.earthdata.nasa.gov/search/"
Expand Down Expand Up @@ -662,15 +663,26 @@ def polygon(self, coordinates: Sequence[PointLike]) -> Self:

# convert to floats
as_floats = []
lons = []
for lon, lat in coordinates:
as_floats.extend([float(lon), float(lat)])

f_lon, f_lat = float(lon), float(lat)
as_floats.extend([f_lon, f_lat])
lons.append(f_lon)

# last point must match first point to complete polygon
if as_floats[0] != as_floats[-2] or as_floats[1] != as_floats[-1]:
raise ValueError(
f"Coordinates of the last pair must match the first pair: {coordinates[0]} != {coordinates[-1]}"
)


# Check for longitude span and trigger warning if it exceeds 180 degrees
if (max(lons) - min(lons)) > 180:
warnings.warn(
"The polygon's longitude span is greater than 180 degrees. "
"Please verify if the coordinates are flipped or intended to cross the antimeridian.",
UserWarning,
)

# convert to strings
as_strs = [str(val) for val in as_floats]

Expand All @@ -695,13 +707,30 @@ def bounding_box(
:param upper_right_lat: upper right latitude of the box
:returns: self
"""

ll_lon = float(lower_left_lon)
ll_lat = float(lower_left_lat)
ur_lon = float(upper_right_lon)
ur_lat = float(upper_right_lat)

if ll_lon > ur_lon:
warnings.warn(
f"Coordinates appear to be flipped: lower_left_lon ({ll_lon}) is "
f"greater than upper_right_lon ({ur_lon}). This will result in "
"a bounding box that crosses the antimeridian.",
UserWarning,
)

self.params["bounding_box"] = (
f"{float(lower_left_lon)},{float(lower_left_lat)},{float(upper_right_lon)},{float(upper_right_lat)}"
)
if ll_lat > ur_lat:
warnings.warn(
f"Coordinates appear to be flipped: lower_left_lat ({ll_lat}) is "
f"greater than upper_right_lat ({ur_lat}). Please verify the bounding box order.",
UserWarning,
)

self.params["bounding_box"] = f"{ll_lon},{ll_lat},{ur_lon},{ur_lat}"
return self

Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change

def line(self, coordinates: Sequence[PointLike]) -> Self:
"""
Filter by granules that overlap a series of connected points. Must be used in combination
Expand Down
146 changes: 107 additions & 39 deletions tests/test_granule.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import inspect
import pytest
import os
from datetime import datetime, timezone, timedelta
import json
Expand Down Expand Up @@ -30,7 +31,7 @@ class TestGranuleClass(VCRTestCase): # type: ignore
sort_key = "sort_key"

def _get_vcr_kwargs(self, **kwargs):
kwargs['decode_compressed_response'] = True
kwargs["decode_compressed_response"] = True
return kwargs

def _get_cassette_library_dir(self):
Expand Down Expand Up @@ -86,21 +87,31 @@ def test_circle_set(self):

def test_revision_date(self):
query = GranuleQuery()
granules = query.short_name("SWOT_L2_HR_RiverSP_reach_2.0").revision_date("2024-07-05", "2024-07-05").format(
"umm_json").get_all()
granules = (
query.short_name("SWOT_L2_HR_RiverSP_reach_2.0")
.revision_date("2024-07-05", "2024-07-05")
.format("umm_json")
.get_all()
)
granule_dict = {}
for granule in granules:
granule_json = json.loads(granule)
for item in granule_json["items"]:
native_id = item["meta"]["native-id"]
granule_dict[native_id] = item

self.assertIn("SWOT_L2_HR_RiverSP_Reach_017_312_AS_20240630T042656_20240630T042706_PIC0_01_swot",
granule_dict.keys())
self.assertIn("SWOT_L2_HR_RiverSP_Reach_017_310_SI_20240630T023426_20240630T023433_PIC0_01_swot",
granule_dict.keys())
self.assertIn("SWOT_L2_HR_RiverSP_Reach_017_333_EU_20240630T225156_20240630T225203_PIC0_01_swot",
granule_dict.keys())
self.assertIn(
"SWOT_L2_HR_RiverSP_Reach_017_312_AS_20240630T042656_20240630T042706_PIC0_01_swot",
granule_dict.keys(),
)
self.assertIn(
"SWOT_L2_HR_RiverSP_Reach_017_310_SI_20240630T023426_20240630T023433_PIC0_01_swot",
granule_dict.keys(),
)
self.assertIn(
"SWOT_L2_HR_RiverSP_Reach_017_333_EU_20240630T225156_20240630T225203_PIC0_01_swot",
granule_dict.keys(),
)

def test_temporal_invalid_strings(self):
query = GranuleQuery()
Expand Down Expand Up @@ -129,40 +140,56 @@ def test_temporal_rounding(self):
# one whole year
query.temporal("2016", "2016")
self.assertIn("temporal", query.params)
self.assertEqual(query.params["temporal"][0], "2016-01-01T00:00:00Z,2016-12-31T23:59:59Z")
self.assertEqual(
query.params["temporal"][0], "2016-01-01T00:00:00Z,2016-12-31T23:59:59Z"
)

# one whole month
query.temporal("2016-10", "2016-10")
self.assertEqual(query.params["temporal"][1], "2016-10-01T00:00:00Z,2016-10-31T23:59:59Z")
self.assertEqual(
query.params["temporal"][1], "2016-10-01T00:00:00Z,2016-10-31T23:59:59Z"
)

# one whole day, wrong way
query.temporal("2016-10-10", datetime(2016, 10, 10))
self.assertNotEqual(query.params["temporal"][2], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z")
self.assertNotEqual(
query.params["temporal"][2], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z"
)

# one whole day, right way
query.temporal("2016-10-10", datetime(2016, 10, 10).date())
self.assertEqual(query.params["temporal"][3], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z")
self.assertEqual(
query.params["temporal"][3], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z"
)

def test_temporal_tz_aware(self):
query = GranuleQuery()

tz = timezone(timedelta(hours=-3))
query.temporal("2016-10-10T00:02:01-03:00", datetime(2016, 10, 10, 0, 2, 1, tzinfo=tz))
query.temporal(
"2016-10-10T00:02:01-03:00", datetime(2016, 10, 10, 0, 2, 1, tzinfo=tz)
)
self.assertIn("temporal", query.params)
self.assertEqual(query.params["temporal"][0], "2016-10-10T03:02:01Z,2016-10-10T03:02:01Z")
self.assertEqual(
query.params["temporal"][0], "2016-10-10T03:02:01Z,2016-10-10T03:02:01Z"
)

def test_temporal_set(self):
query = GranuleQuery()

# both strings
query.temporal("2016-10-10T01:02:03Z", "2016-10-12T09:08:07Z")
self.assertIn("temporal", query.params)
self.assertEqual(query.params["temporal"][0], "2016-10-10T01:02:03Z,2016-10-12T09:08:07Z")
self.assertEqual(
query.params["temporal"][0], "2016-10-10T01:02:03Z,2016-10-12T09:08:07Z"
)

# string and datetime
query.temporal("2016-10-10T01:02:03Z", datetime(2016, 10, 12, 9))
self.assertIn("temporal", query.params)
self.assertEqual(query.params["temporal"][1], "2016-10-10T01:02:03Z,2016-10-12T09:00:00Z")
self.assertEqual(
query.params["temporal"][1], "2016-10-10T01:02:03Z,2016-10-12T09:00:00Z"
)

# string and None
query.temporal(datetime(2016, 10, 12, 10, 55, 7), None)
Expand All @@ -172,12 +199,16 @@ def test_temporal_set(self):
# both datetimes
query.temporal(datetime(2016, 10, 12, 10, 55, 7), datetime(2016, 10, 12, 11))
self.assertIn("temporal", query.params)
self.assertEqual(query.params["temporal"][3], "2016-10-12T10:55:07Z,2016-10-12T11:00:00Z")
self.assertEqual(
query.params["temporal"][3], "2016-10-12T10:55:07Z,2016-10-12T11:00:00Z"
)

def test_temporal_option_set(self):
query = GranuleQuery()

query.temporal("2016-10-10T01:02:03Z", "2016-10-12T09:08:07Z", exclude_boundary=True)
query.temporal(
"2016-10-10T01:02:03Z", "2016-10-12T09:08:07Z", exclude_boundary=True
)
self.assertIn("exclude_boundary", query.options["temporal"])
self.assertEqual(query.options["temporal"]["exclude_boundary"], True)

Expand Down Expand Up @@ -261,24 +292,24 @@ def test_orbit_number_encode(self):

def test_day_night_flag_day_set(self):
query = GranuleQuery()
query.day_night_flag('day')
query.day_night_flag("day")

self.assertIn(self.day_night_flag, query.params)
self.assertEqual(query.params[self.day_night_flag], 'day')
self.assertEqual(query.params[self.day_night_flag], "day")

def test_day_night_flag_night_set(self):
query = GranuleQuery()
query.day_night_flag('night')
query.day_night_flag("night")

self.assertIn(self.day_night_flag, query.params)
self.assertEqual(query.params[self.day_night_flag], 'night')
self.assertEqual(query.params[self.day_night_flag], "night")

def test_day_night_flag_unspecified_set(self):
query = GranuleQuery()
query.day_night_flag('unspecified')
query.day_night_flag("unspecified")

self.assertIn(self.day_night_flag, query.params)
self.assertEqual(query.params[self.day_night_flag], 'unspecified')
self.assertEqual(query.params[self.day_night_flag], "unspecified")

def test_day_night_flag_invalid_set(self):
query = GranuleQuery()
Expand Down Expand Up @@ -452,36 +483,39 @@ def test_valid_spatial_state(self):
self.assertTrue(query._valid_state())

def _test_get(self):
""" Test real query """
"""Test real query"""

query = GranuleQuery()
query.short_name('MCD43A4').version('005')
query.short_name("MCD43A4").version("005")
query.temporal(datetime(2016, 1, 1), datetime(2016, 1, 1))
results = query.get(limit=10)

self.assertEqual(len(results), 10)

def test_stac_output(self):
""" Test real query with STAC output type """
"""Test real query with STAC output type"""
# HLSL30: https://cmr.earthdata.nasa.gov/search/concepts/C2021957657-LPCLOUD
query = GranuleQuery()
search = query.parameters(point=(-105.78, 35.79),
temporal=('2021-02-01', '2021-03-01'),
collection_concept_id='C2021957657-LPCLOUD'
)
search = query.parameters(
point=(-105.78, 35.79),
temporal=("2021-02-01", "2021-03-01"),
collection_concept_id="C2021957657-LPCLOUD",
)
results = search.format("stac").get()
feature_collection = json.loads(results[0])

self.assertEqual(len(results), 1)
self.assertEqual(feature_collection['type'], 'FeatureCollection')
self.assertEqual(feature_collection['numberMatched'], 2)
self.assertEqual(len(feature_collection['features']), 2)
self.assertEqual(feature_collection["type"], "FeatureCollection")
self.assertEqual(feature_collection["numberMatched"], 2)
self.assertEqual(len(feature_collection["features"]), 2)

def _test_hits(self):
""" integration test for hits() """
"""integration test for hits()"""

query = GranuleQuery()
query.short_name("AST_L1T").version("003").temporal("2016-10-26T01:30:00Z", "2016-10-26T01:40:00Z")
query.short_name("AST_L1T").version("003").temporal(
"2016-10-26T01:30:00Z", "2016-10-26T01:40:00Z"
)
hits = query.hits()

self.assertEqual(hits, 3)
Expand Down Expand Up @@ -514,7 +548,17 @@ def test_invalid_parameters(self):

def test_valid_formats(self):
query = GranuleQuery()
formats = ["json", "xml", "echo10", "iso", "iso19115", "csv", "atom", "kml", "native"]
formats = [
"json",
"xml",
"echo10",
"iso",
"iso19115",
"csv",
"atom",
"kml",
"native",
]

for _format in formats:
query.format(_format)
Expand Down Expand Up @@ -548,7 +592,9 @@ def test_valid_concept_id(self):
self.assertEqual(query.params["concept_id"], ["C1299783579-LPDAAC_ECS"])

query.concept_id(["C1299783579-LPDAAC_ECS", "G1441380236-PODAAC"])
self.assertEqual(query.params["concept_id"], ["C1299783579-LPDAAC_ECS", "G1441380236-PODAAC"])
self.assertEqual(
query.params["concept_id"], ["C1299783579-LPDAAC_ECS", "G1441380236-PODAAC"]
)

def test_token(self):
query = GranuleQuery()
Expand All @@ -575,3 +621,25 @@ def test_readable_granule_name(self):

query.readable_granule_name(["*a*", "*b*"])
self.assertEqual(query.params[self.readable_granule_name], ["*a*", "*b*"])
# Make sure there are no spaces before 'def'
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

English, please. Also, please add a blank line above to separate it from the preceding method.

def test_wkt_coordinate_order_warning():
    """
    Ensure a warning is raised when polygon coordinates appear to be in the
    wrong order (longitude span greater than 180 degrees).
    """
    # Coordinates that cross the antimeridian (longitude span > 180 degrees);
    # this is the pattern produced by accidentally flipped lon/lat ordering.
    flipped_coords = [(170, 10), (-170, 10), (-170, -10), (170, -10), (170, 10)]
    query = GranuleQuery()

    # Verify that the UserWarning configured in queries.py is emitted.
    with pytest.warns(UserWarning, match="longitude span is greater than 180 degrees"):
        query.polygon(flipped_coords)

def test_bounding_box_order_warning():
    """
    Verify a warning is raised for incorrect bounding box coordinate order
    (lower-left longitude greater than upper-right longitude).
    """
    query = GranuleQuery()

    # Verify the antimeridian-crossing warning defined in queries.py.
    with pytest.warns(UserWarning, match="crosses the antimeridian"):
        query.bounding_box(10, 0, -10, 5)
Comment on lines +625 to +645
Copy link
Copy Markdown
Contributor

@frankinspace frankinspace Apr 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Indentation appears to be incorrect

Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That's based on an earlier comment I made, indicating that these test functions can be top level test functions rather than within the test case class, but that was before I saw that all of the tests in this file are within the class.

I'm fine with these new functions also being methods within the class, but we should probably separate such methods into top level functions (in a separate PR) because most of the test methods in this class do not require vcrpy, so placing them within the class doesn't make sense (although does no harm either).

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ah I see your point. Agreed it can be refactored