This repository has been archived by the owner on Jul 11, 2023. It is now read-only.

Merge pull request #556 from emthompson-usgs/refactor
Some initial refactoring
emthompson-usgs authored Dec 17, 2020
2 parents d5f46da + 9e802a8 commit d7988e8
Showing 153 changed files with 6,916 additions and 861 deletions.
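
Most of the changes below follow a single pattern: flat top-level modules are moved into subpackages (core, utils, io, waveform_processing). As an editorial summary, collected from the diffs shown below (the mapping itself is not part of the commit text):

```python
# Import-path migration applied throughout this commit (old -> new):
#   gmprocess.stationtrace      -> gmprocess.core.stationtrace
#   gmprocess.stationstream     -> gmprocess.core.stationstream
#   gmprocess.streamcollection  -> gmprocess.core.streamcollection
#   gmprocess.args              -> gmprocess.utils.args
#   gmprocess.config            -> gmprocess.utils.config
#   gmprocess.constants         -> gmprocess.utils.constants
#   gmprocess.event             -> gmprocess.utils.event
#   gmprocess.exception         -> gmprocess.utils.exception
#   gmprocess.logging           -> gmprocess.utils.logging
#   gmprocess.plot              -> gmprocess.utils.plot
#   gmprocess.tables            -> gmprocess.utils.tables
#   gmprocess.report            -> gmprocess.io.report
#   gmprocess.processing        -> gmprocess.waveform_processing.processing

# Updating downstream code is a one-line change per import, e.g.:
from gmprocess.core.streamcollection import StreamCollection
```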
4 changes: 2 additions & 2 deletions docs/developer/readers.md
@@ -93,8 +93,8 @@ import numpy as np
from obspy.core.utcdatetime import UTCDateTime

# local imports
-from gmprocess.stationtrace import StationTrace
-from gmprocess.stationstream import StationStream
+from gmprocess.core.stationtrace import StationTrace
+from gmprocess.core.stationstream import StationStream
from gmprocess.io.seedname import get_channel_name, is_channel_north

TEXT_HDR_ROWS = 17
2 changes: 1 addition & 1 deletion docs/scripting/data-structures/stream-collection.md
@@ -10,7 +10,7 @@ together.
```python
import glob
from gmprocess.io.read import read_data
-from gmprocess.streamcollection import StreamCollection
+from gmprocess.core.streamcollection import StreamCollection

# these sample files can be found in the repository
# under gmprocess/data/testdata/knet/us2000cnnl
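
For context, the surrounding doc example presumably builds a StreamCollection along these lines — a sketch assuming the sample-file layout mentioned in the comment; only the import paths are verbatim from the diff:

```python
import glob

from gmprocess.io.read import read_data
from gmprocess.core.streamcollection import StreamCollection

# read each KNET sample file; read_data() returns a list of streams
streams = []
for filename in glob.glob('gmprocess/data/testdata/knet/us2000cnnl/*'):
    streams += read_data(filename)

# group the individual channel streams by station
sc = StreamCollection(streams)
print(sc)
```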
8 changes: 4 additions & 4 deletions docs/scripting/scripting.md
@@ -8,10 +8,10 @@ processing steps, and then make plots of the processed waveforms.
import os
import pkg_resources

-from gmprocess.streamcollection import StreamCollection
-from gmprocess.config import get_config
-from gmprocess.processing import process_streams
-from gmprocess.event import get_event_object
+from gmprocess.core.streamcollection import StreamCollection
+from gmprocess.utils.config import get_config
+from gmprocess.waveform_processing.processing import process_streams
+from gmprocess.utils.event import get_event_object

# Path to example data
datapath = os.path.join('data', 'testdata', 'demo', 'ci38457511', 'raw')
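
Assembled, the updated scripting example plausibly runs along these lines — a sketch that assumes the event ID embedded in the datapath ('ci38457511') and the process_streams(streams, event, config) calling convention; only the imports and the datapath are from the diff:

```python
import glob
import os

from gmprocess.core.streamcollection import StreamCollection
from gmprocess.utils.config import get_config
from gmprocess.waveform_processing.processing import process_streams
from gmprocess.utils.event import get_event_object
from gmprocess.io.read import read_data

# Path to example data
datapath = os.path.join('data', 'testdata', 'demo', 'ci38457511', 'raw')

# read the raw files and group them into a StreamCollection
streams = []
for filename in glob.glob(os.path.join(datapath, '*')):
    streams += read_data(filename)
sc = StreamCollection(streams)

# fetch event information by ComCat ID and run the processing steps
event = get_event_object('ci38457511')
config = get_config()
processed = process_streams(sc, event, config=config)
```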
8 changes: 4 additions & 4 deletions docs/scripting/waveform-metrics.md
@@ -7,10 +7,10 @@ metrics.
import os
import pkg_resources

-from gmprocess.streamcollection import StreamCollection
-from gmprocess.config import get_config
-from gmprocess.processing import process_streams
-from gmprocess.event import get_event_object
+from gmprocess.core.streamcollection import StreamCollection
+from gmprocess.utils.config import get_config
+from gmprocess.waveform_processing.processing import process_streams
+from gmprocess.utils.event import get_event_object
from gmprocess.metrics.station_summary import StationSummary

# Path to example data
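
The metrics doc then presumably computes a StationSummary from a processed stream, roughly as below — the component and IMT lists are illustrative assumptions, not taken from the commit:

```python
from gmprocess.metrics.station_summary import StationSummary

# 'processed' is a list of processed streams, as in the previous sketch;
# the components and IMTs here are illustrative choices
stream = processed[0]
summary = StationSummary.from_stream(
    stream,
    components=['channels', 'rotd50'],
    imts=['pga', 'pgv', 'sa1.0'],
    event=event)
print(summary.pgms)
```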
6 changes: 3 additions & 3 deletions gmprocess/bin/gmconvert.py
@@ -7,9 +7,9 @@
import logging

# local imports
-from gmprocess.logging import setup_logger
-from gmprocess.args import add_shared_args
-from gmprocess.streamcollection import StreamCollection
+from gmprocess.utils.logging import setup_logger
+from gmprocess.utils.args import add_shared_args
+from gmprocess.core.streamcollection import StreamCollection
from gmprocess.io.read import read_data
from gmprocess.io.read_directory import directory_to_streams

5 changes: 3 additions & 2 deletions gmprocess/bin/gminfo.py
@@ -14,8 +14,8 @@

# local imports
from gmprocess.io.read import _get_format, read_data
-from gmprocess.args import add_shared_args
-from gmprocess.stationtrace import REV_PROCESS_LEVELS
+from gmprocess.utils.args import add_shared_args
+from gmprocess.core.stationtrace import REV_PROCESS_LEVELS

COLUMNS = ['Filename', 'Format', 'Process Level',
'Start Time', 'End Time',
@@ -179,6 +179,7 @@ def main():
# Shared arguments
parser = add_shared_args(parser)
args = parser.parse_args()

if not args.concise and args.save:
msg = '''
****************************************************************
16 changes: 8 additions & 8 deletions gmprocess/bin/gmprocess.py
@@ -17,18 +17,18 @@
import numpy as np

# local imports
-from gmprocess.args import add_shared_args
+from gmprocess.utils.args import add_shared_args
from gmprocess.io.fetch_utils import (get_events, update_config,
save_shakemap_amps, download,
draw_stations_map)
-from gmprocess.logging import setup_logger
+from gmprocess.utils.logging import setup_logger
from gmprocess.io.asdf.stream_workspace import StreamWorkspace
-from gmprocess.processing import process_streams
-from gmprocess.report import build_report_latex
-from gmprocess.plot import summary_plots, plot_regression, plot_moveout
-from gmprocess.config import get_config
-from gmprocess.tables import set_precisions
-from gmprocess.constants import DEFAULT_FLOAT_FORMAT, DEFAULT_NA_REP
+from gmprocess.waveform_processing.processing import process_streams
+from gmprocess.io.report import build_report_latex
+from gmprocess.utils.plot import summary_plots, plot_regression, plot_moveout
+from gmprocess.utils.config import get_config
+from gmprocess.utils.tables import set_precisions
+from gmprocess.utils.constants import DEFAULT_FLOAT_FORMAT, DEFAULT_NA_REP

NON_IMT_COLS = set(['EarthquakeId',
'EarthquakeTime',
4 changes: 2 additions & 2 deletions gmprocess/bin/gmsetup.py
@@ -10,8 +10,8 @@
import pkg_resources

# local imports
-from gmprocess.constants import CONFIG_FILE_PRODUCTION
-from gmprocess.args import add_shared_args
+from gmprocess.utils.constants import CONFIG_FILE_PRODUCTION
+from gmprocess.utils.args import add_shared_args


def main():
File renamed without changes.
@@ -11,7 +11,7 @@
Response, InstrumentSensitivity)
# local imports
from .stationtrace import StationTrace
-from gmprocess.exception import GMProcessException
+from gmprocess.utils.exception import GMProcessException

UNITS = {
'acc': 'cm/s/s',
@@ -17,7 +17,7 @@

# local imports
from gmprocess._version import get_versions
-from gmprocess.config import get_config
+from gmprocess.utils.config import get_config
from gmprocess.io.seedname import get_units_type

UNITS = {'acc': 'cm/s^2',
@@ -16,12 +16,12 @@
import pandas as pd
import numpy as np

-from gmprocess.exception import GMProcessException
+from gmprocess.utils.exception import GMProcessException
from gmprocess.metrics.station_summary import StationSummary
-from gmprocess.stationtrace import REV_PROCESS_LEVELS
-from gmprocess.stationstream import StationStream
+from gmprocess.core.stationtrace import REV_PROCESS_LEVELS
+from gmprocess.core.stationstream import StationStream
from gmprocess.io.read_directory import directory_to_streams
-from gmprocess.config import get_config
+from gmprocess.utils.config import get_config


INDENT = 2
3 changes: 2 additions & 1 deletion gmprocess/io/asdf/core.py
@@ -27,7 +27,8 @@ def is_asdf(filename):


def read_asdf(filename, eventid=None, stations=None, label=None):
"""Read Streams of data (complete with processing metadata) from an ASDF file.
"""Read Streams of data (complete with processing metadata) from an ASDF
file.
Args:
filename (str):
46 changes: 26 additions & 20 deletions gmprocess/io/asdf/stream_workspace.py
@@ -18,13 +18,13 @@
from mapio.gmt import GMTGrid

# local imports
-from gmprocess.stationtrace import (StationTrace, TIMEFMT_MS, NS_SEIS,
-                                    _get_person_agent, _get_software_agent)
-from gmprocess.stationstream import StationStream
-from gmprocess.streamcollection import StreamCollection
+from gmprocess.core.stationtrace import (
+    StationTrace, TIMEFMT_MS, NS_SEIS, _get_person_agent, _get_software_agent)
+from gmprocess.core.stationstream import StationStream
+from gmprocess.core.streamcollection import StreamCollection
from gmprocess.metrics.station_summary import StationSummary, XML_UNITS
-from gmprocess.exception import GMProcessException
-from gmprocess.event import ScalarEvent
+from gmprocess.utils.exception import GMProcessException
+from gmprocess.utils.event import ScalarEvent

TIMEPAT = '[0-9]{4}-[0-9]{2}-[0-9]{2}T'
EVENT_TABLE_COLUMNS = ['id', 'time', 'latitude',
@@ -139,7 +139,7 @@ def format_netsta(stats):


def format_nslc(stats):
-#loc = '' if stats.location == '--' else stats.location
+# loc = '' if stats.location == '--' else stats.location
return '{st.network}.{st.station}.{st.location}.{st.channel}'.format(
st=stats)

@@ -149,7 +149,7 @@ def format_nslct(stats, tag):


def format_nslit(stats, inst, tag):
-#loc = '' if stats.location == '--' else stats.location
+# loc = '' if stats.location == '--' else stats.location
return '{st.network}.{st.station}.{st.location}.{inst}_{tag}'.format(
st=stats, inst=inst, tag=tag)

@@ -362,15 +362,16 @@ def addStreams(self, event, streams, label=None):
base_dtype = ''.join([part.capitalize()
for part in name_parts])
for array_name, array in spectrum.items():
-path = base_dtype + array_name.capitalize() + "/" + procname
+path = base_dtype + array_name.capitalize() \
+    + "/" + procname
try:
self.dataset.add_auxiliary_data(
array,
data_type='Cache',
path=path,
parameters={}
)
-except Exception as e:
+except Exception:
pass

inventory = stream.getInventory()
@@ -418,10 +419,11 @@ def getStreams(self, eventid, stations=None, labels=None):
"""
trace_auxholder = []
stream_auxholder = []
-if 'TraceProcessingParameters' in self.dataset.auxiliary_data:
-    trace_auxholder = self.dataset.auxiliary_data.TraceProcessingParameters
-if 'StreamProcessingParameters' in self.dataset.auxiliary_data:
-    stream_auxholder = self.dataset.auxiliary_data.StreamProcessingParameters
+auxdata = self.dataset.auxiliary_data
+if 'TraceProcessingParameters' in auxdata:
+    trace_auxholder = auxdata.TraceProcessingParameters
+if 'StreamProcessingParameters' in auxdata:
+    stream_auxholder = auxdata.StreamProcessingParameters
streams = []

if stations is None:
@@ -477,9 +479,10 @@ def getStreams(self, eventid, stations=None, labels=None):
# get the trace spectra arrays from auxiliary,
# repack into stationtrace object
spectra = {}
-if 'Cache' in self.dataset.auxiliary_data:
-    for aux in self.dataset.auxiliary_data['Cache'].list():
-        auxarray = self.dataset.auxiliary_data['Cache'][aux]
+
+if 'Cache' in auxdata:
+    for aux in auxdata['Cache'].list():
+        auxarray = auxdata['Cache'][aux]
if top not in auxarray.list():
continue
auxarray_top = auxarray[top]
@@ -650,7 +653,8 @@ def calcMetrics(self, eventid, stations=None, labels=None, config=None,
self.insert_aux(xmlstr, 'StationMetrics', metricpath)

def getTables(self, label, streams=None, stream_label=None):
-    '''Retrieve dataframes containing event information and IMC/IMT metrics.
+    '''Retrieve dataframes containing event information and IMC/IMT
+    metrics.
Args:
label (str):
@@ -850,7 +854,8 @@ def getFitSpectraTable(self, eventid, label, streams):

def getStreamMetrics(self, eventid, network, station, label, streams=None,
stream_label=None):
"""Extract a StationSummary object from the ASDF file for a given input Stream.
"""Extract a StationSummary object from the ASDF file for a given
input Stream.
Args:
eventid (str):
Expand Down Expand Up @@ -906,7 +911,8 @@ def getStreamMetrics(self, eventid, network, station, label, streams=None,
if top in auxholder:
tauxholder = auxholder[top]
if metricpath not in tauxholder:
-fmt = 'Stream metrics path (%s) not in WaveFormMetrics auxiliary_data.'
+fmt = ('Stream metrics path (%s) not in WaveFormMetrics '
+       'auxiliary_data.')
logging.warning(fmt % metricpath)
return None

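
For reference, the read side of the workspace API touched above can be exercised roughly as follows — the method names come from this diff, while the file name, event ID, and label are placeholders:

```python
from gmprocess.io.asdf.stream_workspace import StreamWorkspace

# open an existing ASDF workspace file (placeholder path)
workspace = StreamWorkspace.open('workspace.h5')

# retrieve processed streams for one event; cached spectra stored under
# the 'Cache' auxiliary-data group are repacked onto the traces
streams = workspace.getStreams('ci38457511', labels=['default'])
workspace.close()
```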
6 changes: 4 additions & 2 deletions gmprocess/io/asdf/utils.py
@@ -4,6 +4,7 @@
# third party imports
import h5py


class TallyStorage(object):
"""Tally storage used within each group.
"""
@@ -72,7 +73,8 @@ def compute_storage(self, items, store_subtotals=False):
'groups': {}
}
else:
raise ValueError("Group item '{}' is of type '{}', expected "
"'h5.Dataset' or 'h5.Group'".format(name, type(item)))
raise ValueError(
"Group item '{}' is of type '{}', expected "
"'h5.Dataset' or 'h5.Group'".format(name, type(item)))
subtotal_bytes += item_bytes
return (subtotal_bytes, storage)
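
A hedged usage sketch for TallyStorage, inferred only from the signature and return value visible in this hunk (the constructor arguments and file name are assumptions):

```python
import h5py

from gmprocess.io.asdf.utils import TallyStorage

# tally the bytes stored under each group of an HDF5 file
# (placeholder path; constructor assumed to take no required arguments)
with h5py.File('workspace.h5', 'r') as fh:
    tally = TallyStorage()
    total_bytes, storage = tally.compute_storage(
        fh.items(), store_subtotals=True)
    print('total: {} bytes'.format(total_bytes))
```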
6 changes: 3 additions & 3 deletions gmprocess/io/bhrc/core.py
@@ -6,12 +6,12 @@

# third party imports
import numpy as np
-from gmprocess.constants import UNIT_CONVERSIONS
from obspy.core.utcdatetime import UTCDateTime

# local
-from gmprocess.stationstream import StationStream
-from gmprocess.stationtrace import StationTrace, PROCESS_LEVELS
+from gmprocess.utils.constants import UNIT_CONVERSIONS
+from gmprocess.core.stationstream import StationStream
+from gmprocess.core.stationtrace import StationTrace, PROCESS_LEVELS
from gmprocess.io.seedname import get_channel_name, get_units_type


23 changes: 14 additions & 9 deletions gmprocess/io/cosmos/cesmd_fetcher.py
@@ -11,11 +11,12 @@
# local imports
from gmprocess.io.fetcher import DataFetcher, _get_first_value
from gmprocess.io.read import read_data
-from gmprocess.streamcollection import StreamCollection
-from gmprocess.config import get_config
-from gmprocess.exception import GMProcessException
-from gmprocess.io.cosmos.cesmd_search import (get_records, get_metadata,
-                                              get_stations_dataframe)
+from gmprocess.core.streamcollection import StreamCollection
+from gmprocess.utils.config import get_config
+from gmprocess.utils.exception import GMProcessException
+from gmprocess.io.cosmos.cesmd_search import (
+    get_records, get_metadata,
+    get_stations_dataframe)

# default values for this fetcher
# if None specified in constructor, AND no parameters specified in
@@ -271,8 +272,8 @@ def retrieveData(self, event_dict):
else:
# web service has a maximum number of stations you're allowed to
# fetch (note that this may not be the same as the number of files)
-# so we're splitting up the stations by distance and downloading them
-# in chunks.
+# so we're splitting up the stations by distance and downloading
+# them in chunks.
dataframe = get_stations_dataframe(event)
distances = dataframe['epidist'].to_numpy()
distances.sort()
@@ -300,14 +301,18 @@
except GMProcessException as gpe:
eqfmt = 'M%.1f %s'
eqdesc = eqfmt % (
-    self.magnitude, self.time.strftime('%Y-%m-%d %H:%M:%S'))
+    self.magnitude,
+    self.time.strftime('%Y-%m-%d %H:%M:%S')
+)
if '404' in str(gpe):
fmt = ('Could not find data records for %s '
'between %.1f km and %.1f km')
logging.info(fmt % (eqdesc, mindist, maxdist))
else:
logging.warning(
-    'Unplanned exception getting records for %s' % eqdesc)
+    'Unplanned exception getting records for %s'
+    % eqdesc
+)
continue
datafiles += tfiles

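
In isolation, the distance-based chunking described in the comments above might look like the following sketch — the chunk size and variable names are assumptions, not from the commit:

```python
import numpy as np

# assumed per-request station limit imposed by the web service
MAX_STATIONS = 20  # illustrative value, not from the commit

# sorted epicentral distances, as produced from the stations dataframe
distances = np.sort(np.array([3.2, 7.5, 11.0, 18.4, 25.9, 40.1]))

# split into chunks so each request stays under the station limit;
# each chunk defines a (mindist, maxdist) window for one download
for i in range(0, len(distances), MAX_STATIONS):
    chunk = distances[i:i + MAX_STATIONS]
    mindist, maxdist = chunk[0], chunk[-1]
    print('requesting records between %.1f km and %.1f km'
          % (mindist, maxdist))
```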
(The remaining changed files in this commit are not shown.)
