Commit

Merge pull request #55 from wsavran/master
update CNAME and fix issue with datetime filtering
wsavran authored Nov 11, 2020
2 parents e1a7926 + e670296 commit 6c42f67
Showing 4 changed files with 35 additions and 15 deletions.
.github/workflows/build-sphinx.yml (2 changes: 1 addition & 1 deletion)
@@ -49,7 +49,7 @@ jobs:
           git remote add deploy "https://token:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git"
           git checkout -b gh-pages
           echo "Empty README.md for documenation cache." > README.md
-          echo "cseptesting.org" > CNAME
+          echo "docs.cseptesting.org" > CNAME
           git add .
           git commit -am "Updating PyCSEP docs for commit ${GITHUB_SHA} made on `date -d"@${SOURCE_DATE_EPOCH}" --iso-8601=seconds` from ${GITHUB_REF} by ${GITHUB_ACTOR}"
           git push deploy gh-pages --force
csep/core/catalogs.py (20 changes: 10 additions & 10 deletions)
@@ -501,30 +501,30 @@ def parse_datetime_to_origin_time(dt_input):
         if isinstance(statements, str):
             name = statements.split(' ')[0]
             if name == 'datetime':
-                name, oper, date, time = statements.split(' ')
+                _, oper, date, time = statements.split(' ')
+                name = 'origin_time'
                 # can be a datetime.datetime object or datetime string, if we want to support filtering on meta data it
                 # can happen here. but need to determine what to do if entry are not present bc meta data does not
                 # need to be square
                 value = strptime_to_utc_epoch(' '.join([date, time]))
-                idx = numpy.where(operators[oper](self.get_epoch_times(), value))
-                filtered = self.catalog[idx]
+                filtered = self.catalog[operators[oper](self.catalog[name], float(value))]
             else:
                 name, oper, value = statements.split(' ')
                 filtered = self.catalog[operators[oper](self.catalog[name], float(value))]
         elif isinstance(statements, (list, tuple)):
             # slower but at the convenience of not having to call multiple times
             filters = list(statements)
             filtered = numpy.copy(self.catalog)
             idx = numpy.ones(self.event_count, dtype=numpy.bool)
             for filt in filters:
                 name = filt.split(' ')[0]
                 # create indexing array, start with all events
                 if name == 'datetime':
-                    name, oper, date, time = filt.split(' ')
-                    # can be a datetime.datetime object or datetime string, if we want to support filtering on meta data it
-                    # can happen here. but need to determine what to do if entry are not present bc meta data does not
-                    # need to be square
+                    _, oper, date, time = filt.split(' ')
+                    # we map the requested datetime to an epoch time so we act like the user requested origin_time
+                    name = 'origin_time'
                     value = strptime_to_utc_epoch(' '.join([date, time]))
-                    idx = numpy.where(operators[oper](self.get_epoch_times(), value))
-                    filtered = self.catalog[idx]
+                    filtered = filtered[operators[oper](filtered[name], float(value))]
                 else:
                     name, oper, value = filt.split(' ')
                     filtered = filtered[operators[oper](filtered[name], float(value))]
@@ -539,7 +539,7 @@ def parse_datetime_to_origin_time(dt_input):
         # make and return new object
         cls = self.__class__
         inst = cls(data=filtered, catalog_id=self.catalog_id, format=self.format, name=self.name,
-                   region=self.region)
+                   region=self.region, filters=statements)
         return inst

     def filter_spatial(self, region=None, update_stats=False):
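The effect of the change: a statement such as 'datetime >= 2009-07-01 00:00:00.0' is now rewritten to an 'origin_time' comparison (the datetime string is converted to an epoch value by strptime_to_utc_epoch), and the structured catalog array is boolean-masked on that column directly, rather than routed through numpy.where over self.get_epoch_times(). A minimal, self-contained sketch of that masking logic, with made-up field names and epoch values for illustration (this is not the PyCSEP API itself):

    import operator
    import numpy

    # subset of the operators mapping used in catalogs.py
    operators = {'>=': operator.ge, '<': operator.lt}

    # toy catalog: a structured array with origin_time stored as an epoch time
    catalog = numpy.array(
        [(b'1', 100.0), (b'2', 200.0), (b'3', 300.0)],
        dtype=[('id', 'S256'), ('origin_time', 'f8')])

    # a 'datetime < ...' statement is mapped to this form by the fix
    name, oper, value = 'origin_time < 250.0'.split(' ')

    # boolean-mask the named column directly, as the fixed code does
    filtered = catalog[operators[oper](catalog[name], float(value))]
    print(filtered['id'])  # -> [b'1' b'2']

Re-masking the same `filtered` array on every pass is also what makes chained filters in the list branch behave like successive applications of single filters.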
csep/utils/plots.py (3 changes: 3 additions & 0 deletions)
@@ -576,6 +576,9 @@ def plot_spatial_dataset(gridded, region, show=False, plot_args=None):
     cmap = plot_args.get('cmap', None)

     fig = pyplot.figure(figsize=figsize)
+
+    # use different projection for global and regional forecasts
+    # determine this from the extent of the grid
     ax = fig.add_subplot(111, projection=ccrs.PlateCarree())

     lons, lats = numpy.meshgrid(region.xs, region.ys)
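The added comment is a TODO rather than behavior: the plot still uses PlateCarree() unconditionally. A hedged sketch of what extent-based projection selection could look like (the choose_projection helper, the 300-degree threshold, and the Robinson fallback are illustrative assumptions, not part of this commit):

    import cartopy.crs as ccrs

    def choose_projection(xs, ys, global_lon_span=300.0):
        # treat grids spanning most longitudes as global forecasts
        lon_span = max(xs) - min(xs)
        if lon_span >= global_lon_span:
            return ccrs.Robinson()     # global maps render better on Robinson
        return ccrs.PlateCarree()      # equirectangular works well regionally

    # hypothetical use inside plot_spatial_dataset:
    # ax = fig.add_subplot(111, projection=choose_projection(region.xs, region.ys))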
tests/test_catalog.py (25 changes: 21 additions & 4 deletions)
@@ -27,7 +27,7 @@ def test_filter_with_list(self):
         test_cat = copy.deepcopy(self.test_cat1)
         test_cat.filter(filters)
         # Filter together
-        numpy.array_equal(numpy.array([b'2'], dtype='S256'), test_cat.get_event_ids())
+        numpy.testing.assert_array_equal(numpy.array([b'2'], dtype='S256'), test_cat.get_event_ids())

     def test_filter_separately(self):
         # Filter together
@@ -39,14 +39,31 @@ def test_filter_separately(self):
         for i in filters:
             test_cat.filter(i)

-        numpy.array_equal(numpy.array([b'2'], dtype='S256'), test_cat.get_event_ids())
+        numpy.testing.assert_array_equal(numpy.array([b'2'], dtype='S256'), test_cat.get_event_ids())

-    def test_filter_with_datetime(self):
+    def test_filter_with_datetime_list(self):
         start_dt = strptime_to_utc_datetime('2009-07-01 00:00:00.0')
         end_dt = strptime_to_utc_datetime('2010-07-01 00:00:00.0')
         filters = [f'datetime >= {start_dt}', f'datetime < {end_dt}']  # should return only event 2
         test_cat = copy.deepcopy(self.test_cat1)
         test_cat.filter(filters)
-        numpy.array_equal(numpy.array([b'2'], dtype='S256'), test_cat.get_event_ids())
+        numpy.testing.assert_array_equal(numpy.array([b'2'], dtype='S256'), test_cat.get_event_ids())
+
+    def test_filter_with_datetime_in_place_list(self):
+        start_dt = strptime_to_utc_datetime('2009-07-01 00:00:00.0')
+        end_dt = strptime_to_utc_datetime('2010-07-01 00:00:00.0')
+        filters = [f'datetime > {start_dt}', f'datetime < {end_dt}']  # should return only event 2
+        test_cat = copy.deepcopy(self.test_cat1)
+        test_cat = test_cat.filter(filters, in_place=False)
+        numpy.testing.assert_equal(numpy.array([b'2'], dtype='S256'), test_cat.get_event_ids())
+
+    def test_filter_with_datetime(self):
+        end_dt = strptime_to_utc_datetime('2010-07-01 00:00:00.0')
+        filters = f'datetime < {end_dt}'  # should return only event 1 and 2
+        test_cat = copy.deepcopy(self.test_cat1)
+        filtered_test_cat = test_cat.filter(filters, in_place=False)
+        numpy.testing.assert_equal(numpy.array([b'1', b'2'], dtype='S256').T, filtered_test_cat.get_event_ids())


 if __name__ == '__main__':
     unittest.main()
