def _deserialize(self, zmap_str):
    catalog = Catalog()
    for row in zmap_str.split('\n'):
        if len(row) == 0:
            continue
        origin = Origin()
        event = Event(origins=[origin])
        event.preferred_origin_id = origin.resource_id.id
        # Begin value extraction
        columns = row.split('\t', 13)[:13]  # ignore extra columns
        values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
        # Extract origin
        origin.longitude = self._str2num(values.get('lon'))
        origin.latitude = self._str2num(values.get('lat'))
        depth = self._str2num(values.get('depth'))
        if depth is not None:
            origin.depth = depth * 1000.0
        z_err = self._str2num(values.get('z_err'))
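
# A minimal usage sketch (not part of the excerpt above): ObsPy exposes this
# ZMAP deserializer through read_events(); 'catalog.zmap' is a hypothetical
# path.
from obspy import read_events
catalog = read_events('catalog.zmap', format='ZMAP')
print(catalog)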
    process = kwargs.get('process', True)
elif method == 'from_sac':
    sac_files = kwargs.get('sac_files')
    if isinstance(sac_files, list):
        if isinstance(sac_files[0], (Stream, Trace)):
            # This is a list of streams...
            st = Stream(sac_files[0])
            for sac_file in sac_files[1:]:
                st += sac_file
        else:
            sac_files = [read(sac_file)[0] for sac_file in sac_files]
            st = Stream(sac_files)
    else:
        st = sac_files
    # Make an event object...
    catalog = Catalog([sactoevent(st, debug=debug)])
    sub_catalogs = [catalog]
temp_list = []
process_lengths = []
if "P_all" in swin or "S_all" in swin or all_horiz:
    all_channels = True
else:
    all_channels = False
for sub_catalog in sub_catalogs:
    if method in ['from_seishub', 'from_client']:
        debug_print("Downloading data", 1, debug)
        st = _download_from_client(
            client=client, client_type=client_map[method],
            catalog=sub_catalog, data_pad=data_pad,
            process_len=process_len, available_stations=available_stations,
            all_channels=all_channels)  # closing argument assumed; the excerpt truncates here
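
# A hedged sketch of the sub-catalog idea used above (split_catalog and
# chunk_size are hypothetical names, not EQcorrscan API): a Catalog can be
# rebuilt from any slice of its events.
from obspy.core.event import Catalog

def split_catalog(catalog, chunk_size=10):
    """Yield sub-catalogs of at most chunk_size events each."""
    for i in range(0, len(catalog), chunk_size):
        yield Catalog(events=catalog.events[i:i + chunk_size])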
    an obspy catalog instance

    Parameters
    ----------
    temkey : str, pd.DataFrame
        The standard template key (or path to it)
    picks : str, pd.DataFrame
        A picks file in the same format as created by pickPhases

    Returns
    -------
    An obspy Catalog object
    """
    temkey = readKey(temkey, "template")
    picks = readKey(picks, 'phases')
    cat = obspy.core.event.Catalog()
    for ind, row in temkey.iterrows():
        cat.events.append(_getEvents(row, picks))
    return cat
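
# A minimal, self-contained sketch of the same append pattern (all values
# invented for illustration):
from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin
cat = Catalog()
origin = Origin(time=UTCDateTime(2020, 1, 1), latitude=45.0,
                longitude=7.0, depth=5000.0)
cat.events.append(Event(origins=[origin]))
print(cat)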
# First we need to work out what to call the s-file and open it
# Check that user ID is the correct length
if len(userid) != 4:
    raise NordicParsingError('%s User ID must be 4 characters long'
                             % userid)
# Check that outdir exists
if not os.path.isdir(outdir):
    raise NordicParsingError('Out path does not exist, I will not '
                             'create this: ' + outdir)
# Check that evtype is one of L, R or D
if evtype not in ['L', 'R', 'D']:
    raise NordicParsingError('Event type must be either L, R or D')
if explosion:
    evtype += 'E'
# Check that there is exactly one event
if isinstance(event, Catalog) and len(event) == 1:
    event = event[0]
elif isinstance(event, Event):
    event = event
else:
    raise NordicParsingError('Needs a single event')
if not isinstance(wavefiles, list):
    wavefiles = [str(wavefiles)]
# Determine name from origin time
try:
    origin = event.preferred_origin() or event.origins[0]
except IndexError:
    msg = 'Need at least one origin with at least an origin time'
    raise NordicParsingError(msg)
evtime = origin.time
if not evtime:
    msg = ('event has an origin, but time is not populated. ' +
           'This is required!')  # closing string assumed; the excerpt truncates here
    raise NordicParsingError(msg)
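
# A hedged usage sketch of the writer this validation belongs to: ObsPy
# registers a NORDIC writer for catalogs, so a valid catalog can be written
# as an s-file ('example.out' is a hypothetical path).
from obspy import read_events
cat = read_events()  # ObsPy's bundled example catalog
cat.write('example.out', format='NORDIC')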
def _deserialize(self):
    catalog = Catalog()
    res_id = '/'.join((res_id_prefix, self.filename))
    catalog.resource_id = ResourceIdentifier(id=res_id)
    catalog.description = 'Created from NEIC PDE mchedr format'
    catalog.comments = ''
    catalog.creation_info = CreationInfo(creation_time=UTCDateTime())
    for line in self.fh.readlines():
        # XXX: ugly, probably we should do everything in byte strings
        # here? Is the pde / mchedr format unicode aware?
        line = line.decode()
        record_id = line[0:2]
        if record_id == 'HY':
            event = self._parse_record_hy(line)
            catalog.append(event)
        elif record_id == 'P ':
            pick, arrival = self._parse_record_p(line, event)
        elif record_id == 'E ':
            self._parse_record_e(line, event)  # assumed: follows the _parse_record_* pattern above
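
# A hedged sketch of reading such a file through the public API (the format
# key 'MCHEDR' and the path are assumptions here):
from obspy import read_events
catalog = read_events('mchedr.dat', format='MCHEDR')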
if o is not None:  # assumed guard; the opening of this block is truncated in the excerpt
    origins = [origin]
    po = origin.resource_id
    magnitudes, pm = _mags(o, evid)
else:
    o = p
event = Event(resource_id=ResourceIdentifier(evid),
              picks=picks,
              origins=origins,
              magnitudes=magnitudes,
              station_magnitudes=stamags,
              preferred_origin_id=po,
              preferred_magnitude_id=pm,
              **_kw(o, 'event')
              )
events.append(event)
return Catalog(events,
               description='Created from SeismicHandler EVT format')
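
# A minimal sketch (values invented) of how the preferred_* ids set above
# resolve back to their objects:
from obspy.core.event import Event, Origin
origin = Origin(latitude=0.0, longitude=0.0)
event = Event(origins=[origin], preferred_origin_id=origin.resource_id)
print(event.preferred_origin())  # resolves to the Origin above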
def _create_output_event_file(self):
    """
    Write the final output file in QuakeML format.
    """
    self.log("Writing final output file...")
    hypodd_reloc = os.path.join(
        self.working_dir, "output_files", "hypoDD.reloc")
    cat = Catalog()
    self.output_catalog = cat
    for filename in self.event_files:
        cat += read_events(filename)
    with open(hypodd_reloc, "r") as open_file:
        for line in open_file:
            # hypoDD.reloc rows are whitespace-delimited; the original
            # excerpt unpacks many more columns but is truncated here,
            # so only the leading fields are kept (an assumption).
            event_id, lat, lon, depth, *_ = line.split()
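
# A small sketch (hypothetical paths) of the merging pattern used above:
# Catalog supports in-place addition of other catalogs.
from obspy import read_events
from obspy.core.event import Catalog
merged = Catalog()
for path in ['events_a.xml', 'events_b.xml']:
    merged += read_events(path)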
def _deserialize(self):
    # check node "quakeml/eventParameters" for global namespace
    try:
        namespace = _get_first_child_namespace(self.xml_root)
        catalog_el = self._xpath('eventParameters', namespace=namespace)[0]
    except IndexError:
        raise Exception("Not a QuakeML compatible file or string")
    self._quakeml_namespaces = [
        ns for ns in self.xml_root.nsmap.values()
        if ns.startswith(r"http://quakeml.org/xmlns/")]
    # create catalog
    catalog = Catalog(force_resource_id=False)
    # add any custom namespace abbreviations of root element to Catalog
    catalog.nsmap = self.xml_root.nsmap.copy()
    # optional catalog attributes
    catalog.description = self._xpath2obj('description', catalog_el)
    catalog.comments = self._comments(catalog_el)
    catalog.creation_info = self._creation_info(catalog_el)
    # loop over all events
    for event_el in self._xpath('event', catalog_el):
        # create new Event object
        event = Event(force_resource_id=False)
        # optional event attributes
        event.preferred_origin_id = \
            self._xpath2obj('preferredOriginID', event_el)
        event.preferred_magnitude_id = \
            self._xpath2obj('preferredMagnitudeID', event_el)
        event.preferred_focal_mechanism_id = \
            self._xpath2obj('preferredFocalMechanismID', event_el)
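
# The public entry point for this deserializer, as a minimal sketch
# ('/path/to/file.xml' is a hypothetical path; QUAKEML is the standard
# format key):
from obspy import read_events
catalog = read_events('/path/to/file.xml', format='QUAKEML')
catalog.write('roundtrip.xml', format='QUAKEML')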
def _saveCatalog(self, filename=None):
    '''
    Save the catalog to filename.
    '''
    if self.savefile is None and filename is None:
        return self._saveCatalogDlg()
    if filename is not None:
        savefile = filename
    else:
        savefile = self.savefile
    cat = event.Catalog()
    cat.events.append(event.Event(picks=self._picks))
    cat.write(savefile, format='QUAKEML')
... method='from_sac', sac_files=sac_files, lowcut=2.0, highcut=10.0,
... samp_rate=25.0, filt_order=4, length=2.0, swin='all', prepick=0.1,
... all_horiz=True)
>>> print(templates[0][0].stats.sampling_rate)
25.0
>>> print(len(templates[0]))
15
"""
client_map = {'from_client': 'fdsn', 'from_seishub': 'seishub'}
assert method in ('from_client', 'from_seishub', 'from_meta_file',
                  'from_sac')
if not isinstance(swin, list):
    swin = [swin]
process = True
if method in ['from_client', 'from_seishub']:
    catalog = kwargs.get('catalog', Catalog())
    data_pad = kwargs.get('data_pad', 90)
    # Group catalog into days and only download the data once per day
    sub_catalogs = _group_events(
        catalog=catalog, process_len=process_len, template_length=length,
        data_pad=data_pad)
    if method == 'from_client':
        client = FDSNClient(kwargs.get('client_id', None))
        available_stations = []
    else:
        client = SeisHubClient(kwargs.get('url', None), timeout=10)
        available_stations = client.waveform.get_station_ids()
elif method == 'from_meta_file':
    if isinstance(kwargs.get('meta_file'), Catalog):
        catalog = kwargs.get('meta_file')
    else:
        catalog = read_events(kwargs.get('meta_file'))