fh.seek(0)
# start with event file header
# line 1
data = _readline(fh)
number_of_channels = int(data[30:33])
# calculate number of lines with channels
number_of_lines = number_of_channels // 3 + (number_of_channels % 3 and 1)
if number_of_lines < 10:
number_of_lines = 10
# line 2
data = _readline(fh)
# line 3
for _i in range(number_of_lines):
data = _readline(fh)
# now parse each event file channel header + data
stream = Stream()
dlen = arch // 8  # bytes per sample; arch is the word size in bits
dtype = byteorder + 'i' + str(dlen)
stype = '=i' + str(dlen)
for _i in range(number_of_channels):
# get channel header
temp = _readline(fh, 1040)
# create Stats
header = Stats()
header['network'] = (temp[16] + temp[19]).strip()
header['station'] = temp[0:5].strip()
header['location'] = (temp[7] + temp[12]).strip()
header['channel'] = (temp[5:7] + temp[8]).strip()
header['sampling_rate'] = float(temp[36:43])
header['npts'] = int(temp[43:50])
# create start and end times
year = int(temp[9:12]) + 1900
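# A short illustration (not part of the original reader) of how the dtype
# string assembled above decodes raw samples; byteorder and arch follow the
# snippet, the byte string below is made up:
import numpy as np
byteorder, arch = '<', 32
dlen = arch // 8                          # -> 4 bytes per sample
dtype = byteorder + 'i' + str(dlen)       # -> '<i4'
samples = np.frombuffer(b'\x01\x00\x00\x00\x02\x00\x00\x00', dtype=dtype)
# -> array([1, 2], dtype=int32)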
continue
except Exception:
    if verbose:
        print('** unexpected read error, skipping.', file=ofid)
    continue
#- check if this file contains any data
if len(data) == 0:
    # report to both stdout and the log file
    print('File contains no data!')
    print('File contains no data!', file=ofid)
    continue
#- clean the data, merging segments separated by gaps shorter than mergegap seconds
data = mt.mergetraces(data, Fs_original, mergegap)
data = data.split()  # Stream.split() returns a new stream
#- initialize stream to 'recollect' the split traces
colloc_data = Stream()
#- split traces into shorter segments =====================================
if inp.split_do:
    data = proc.slice_traces(data, seglen, minlen, verbose, ofid)
n_traces = len(data)
if verbose:
    print('* contains ' + str(n_traces) + ' trace(s)', file=ofid)
#- trim ===================================================================
if inp.trim:
    data = proc.trim_next_sec(data, verbose, ofid)
#==========================================================================
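#- A minimal, runnable sketch of the merge/split pattern used above, with
#- plain ObsPy calls standing in for the project-specific mt.mergetraces:
import numpy as np
from obspy import Stream, Trace

tr1 = Trace(data=np.ones(100, dtype=np.float32))
tr2 = Trace(data=np.ones(100, dtype=np.float32))
tr2.stats.starttime = tr1.stats.endtime + 5.0  # introduce a 5 s gap
demo = Stream([tr1, tr2])
demo.merge()          # the gap becomes a masked region in one trace
demo = demo.split()   # masked gaps are split back into separate traces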
Logger.error(e)
continue
else:
raise MatchFilterError(
"Could not download data after {0} attempts".format(
retries))
# Get gaps and remove traces as necessary
if min_gap:
gaps = st.get_gaps(min_gap=min_gap)
if len(gaps) > 0:
Logger.warning("Large gaps in downloaded data")
st.merge()
gappy_channels = list(
set([(gap[0], gap[1], gap[2], gap[3])
for gap in gaps]))
_st = Stream()
for tr in st:
tr_stats = (tr.stats.network, tr.stats.station,
tr.stats.location, tr.stats.channel)
if tr_stats in gappy_channels:
Logger.warning(
"Removing gappy channel: {0}".format(tr))
else:
_st += tr
st = _st
st = st.split()  # Stream.split() returns a new stream
st.detrend("simple").merge()
st.trim(starttime=starttime + (i * data_length) - pad,
endtime=starttime + ((i + 1) * data_length) + pad)
# iterate over a copy so traces can be removed from the stream safely
for tr in list(st):
    if not _check_daylong(tr):
        st.remove(tr)
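# For reference, each entry returned by Stream.get_gaps() above is a list of
# [network, station, location, channel, end_of_previous_trace,
#  start_of_next_trace, duration_in_seconds, number_of_missing_samples],
# which is why gap[0:4] identifies the gappy channel. A runnable
# illustration with ObsPy's bundled demo data:
from obspy import read
st_demo = read()                  # demo traces are continuous: no gaps
for gap in st_demo.get_gaps():
    net, sta, loc, cha = gap[0:4]
    print(net, sta, loc, cha, 'gap of', gap[6], 's')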
def _stack_wrapper(groupnames, fname, outkey, **kwargs):
"""
Wrapper around `~yam.stack.stack()`
:param groupnames: groups to load the correlations from
:param fname: file to load correlations from
:param outkey: key to write stacked correlations to
:param \*\*kwargs: all other kwargs are passed to
`~yam.stack.stack()` function
"""
with h5py.File(fname, 'r') as f:
traces = [obspyh5.dataset2trace(f[g]) for g in groupnames]
stream = obspy.Stream(traces)
stack_stream = yam.stack.stack(stream, **kwargs)
for tr in stack_stream:
tr.stats.key = outkey
return stack_stream
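# A hypothetical usage sketch; the group names, file name, and the `length`
# kwarg below are illustrative assumptions, not values from the original
# project:
# stacked = _stack_wrapper(['cross/GR.STA1-GR.STA2/HHZ.HHZ'],
#                          'corr.h5', 'stack_1d', length='1d')
# `stacked` is an obspy.Stream whose traces carry stats.key == 'stack_1d'.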
:type windows: list
:param config: config for calculating adjoint source
:type config: pyadjoint.Config
:param adj_src_type: adjoint source type
:type adj_src_type: str
:param figure_mode: plot flag; set it to True to plot the adjoint
    source for every trace
:type figure_mode: bool
:param adjoint_src_flag: adjoint source flag; set it to True to
    calculate adjoint sources
:type adjoint_src_flag: bool
:return: list of calculated adjoint sources
"""
if not isinstance(observed, Stream):
raise ValueError("Input observed should be obspy.Stream")
if not isinstance(synthetic, Stream):
raise ValueError("Input synthetic should be obspy.Stream")
if windows is None or len(windows) == 0:
return None
if not isinstance(config, pyadjoint.Config):
raise ValueError("Input config should be pyadjoint.Config")
adjsrcs_list = []
for chan_win in windows.values():
if len(chan_win) == 0:
continue
obsd_id, synt_id = _extract_window_id(chan_win)
try:
obs = observed.select(id=obsd_id)[0]
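# For context, Stream.select(id=...) filters on the full SEED id and returns
# a (possibly empty) Stream; indexing [0] as above raises an IndexError when
# no trace matches. A runnable illustration with ObsPy's bundled demo data:
from obspy import read
demo_st = read()                            # BW.RJOB..EH? demo traces
tr = demo_st.select(id="BW.RJOB..EHZ")[0]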
def _applyFilter(st, filt, decimate=False, dtype='double', fillZeros=False):
"""
Apply a filter, decimate, and trim to even start/end times
"""
if st is None or len(st) < 1:
msg = '_applyFilter got a stream with 0 length'
detex.log(__name__, msg, level='warn')
return obspy.Stream()
st.sort()
st1 = st.copy()
if dtype == 'single':  # cast data into single precision
    for tr in st:
        tr.data = tr.data.astype(np.float32)
nc = list(set([x.stats.channel for x in st]))
if len(st) > len(nc): # if data is fragmented only keep largest chunk
if fillZeros:
st = _mergeChannelsFill(st)
else:
st = _mergeChannels(st)
if len(st) != len(nc) or len(st) < 1:
sta = st1[0].stats.station
stime = str(st1[0].stats.starttime)
msg = 'Stream is too fractured around %s on %s' % (str(stime), sta)
detex.log(__name__, msg, level='warn')
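# A rough, runnable sketch of the zero-fill merge that the (not shown)
# _mergeChannelsFill helper is assumed to perform, using plain ObsPy:
import obspy
frag_st = obspy.read()           # continuous demo data; fragmented input
frag_st.merge(fill_value=0.0)    # would be joined with zero-filled gaps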
Edata += edata
Ndata += ndata
Udata += udata
#Finished reading and filtering; now time-shift by rupture time and resample to the data times
ktrace = 0
print("Aligning GFs and resampling to data times...")
for ksta in range(Nsta):
#Loop over subfaults
print('...Working on station #'+str(ksta+1)+' of '+str(Nsta))
for kfault in range(Nfaults):
#Assign current GFs
ess = Stream(Trace())
nss = Stream(Trace())
zss = Stream(Trace())
eds = Stream(Trace())
nds = Stream(Trace())
zds = Stream(Trace())
ess[0] = Ess[ktrace].copy()
nss[0] = Nss[ktrace].copy()
zss[0] = Zss[ktrace].copy()
eds[0] = Eds[ktrace].copy()
nds[0] = Nds[ktrace].copy()
zds[0] = Zds[ktrace].copy()
#Time shift according to subfault rupture time, zero-pad, round to the dt
#interval, decimate, and extend to the maximum time
ess = tshift(ess, tdelay[kfault])
nss = tshift(nss, tdelay[kfault])
zss = tshift(zss, tdelay[kfault])
eds = tshift(eds, tdelay[kfault])
nds = tshift(nds, tdelay[kfault])
zds = tshift(zds, tdelay[kfault])
#Now time align stuff
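#A minimal sketch of a time-shift helper; this is an assumption made for
#illustration, not MudPy's actual tshift implementation:
def tshift_sketch(st, tdelay_seconds):
    #Shift the whole stream later in time by moving the trace start times
    for tr in st:
        tr.stats.starttime = tr.stats.starttime + tdelay_seconds
    return st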
def flex_cut_stream(st, cut_start, cut_end, dynamic_npts=0):
"""
Flexible cut stream. But checks for the time.
:param st: input stream
:param cut_start: cut starttime
:param cut_end: cut endtime
:param dynamic_npts: the dynamic number of points before cut_start
and after
cut_end
:return: the cutted stream
"""
if not isinstance(st, Stream):
raise TypeError("flex_cut_stream method only accepts obspy.Stream "
                "as the first argument")
new_st = Stream()
count = 0
for tr in st:
flex_cut_trace(tr, cut_start, cut_end, dynamic_npts=dynamic_npts)  # trims tr in place
# throw out traces that do not span the full cut window
if tr.stats.starttime <= cut_start and tr.stats.endtime >= cut_end:
new_st.append(tr)
count += 1
if count == 0:
raise ValueError("None of the traces in the Stream satisfy the "
                 "cut time window")
return new_st
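# A hypothetical usage sketch (times are illustrative; flex_cut_trace, used
# above, is assumed to be defined elsewhere in the module):
# from obspy import read
# st = read()
# cut = flex_cut_stream(st, st[0].stats.starttime + 5,
#                       st[0].stats.endtime - 5, dynamic_npts=10)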
.. rubric:: Example
>>> from obspy import read
>>> from eqcorrscan.utils.plotting import spec_trace
>>> st = read()
>>> spec_trace(st, trc='white') # doctest: +SKIP
.. plot::
from obspy import read
from eqcorrscan.utils.plotting import spec_trace
st = read()
spec_trace(st, trc='white')
"""
from obspy import Stream
if isinstance(traces, Stream):
traces.sort(['station', 'channel'])
if not Fig:
Fig = plt.figure(figsize=size)
for i, tr in enumerate(traces):
if i == 0:
ax = Fig.add_subplot(len(traces), 1, i+1)
else:
ax = Fig.add_subplot(len(traces), 1, i+1, sharex=ax)
ax1, ax2 = _spec_trace(tr, wlen=wlen, log=log, trc=trc,
tralpha=tralpha, axes=ax)
ax2.set_yticks([])
if i < len(traces) - 1:
plt.setp(ax1.get_xticklabels(), visible=False)
if isinstance(traces, list):
ax2.text(0.005, 0.85, tr.stats.starttime.datetime.
strftime('%Y/%m/%d %H:%M:%S'),
"""
_check_save_args(save, savefile)
from eqcorrscan.utils import stacking
import copy
from eqcorrscan.core.match_filter import normxcorr2
from obspy import Stream
import warnings
fig, axes = plt.subplots(len(catalog) + 1, 1, sharex=True, figsize=(7, 12))
if len(catalog) > 1:
axes = axes.ravel()
traces = []
al_traces = []
# Keep input safe
clist = copy.deepcopy(catalog)
if isinstance(streams, Stream):
streams = [streams]
st_list = copy.deepcopy(streams)
for i, event in enumerate(clist):
# Extract the appropriate pick
_pick = [pick for pick in event.picks if
pick.waveform_id.station_code == station and
pick.waveform_id.channel_code == channel][0]
if st_list[i].select(station=station, channel=channel):
tr = st_list[i].select(station=station, channel=channel)[0]
else:
print('No data for {0}'.format(_pick.waveform_id))
continue
tr.detrend('linear')
if freqmin:
tr.filter('bandpass', freqmin=freqmin, freqmax=freqmax)
if realign: