Skip to content

Commit

Permalink
Merge pull request #158 from eqcorrscan/doctest-examples
Browse files Browse the repository at this point in the history
Doctest examples
  • Loading branch information
calum-chamberlain authored Sep 14, 2017
2 parents f756fb7 + 22a2e9e commit 9c6bbb1
Show file tree
Hide file tree
Showing 10 changed files with 215 additions and 219 deletions.
1 change: 1 addition & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ jobs:
command: |
. venv/bin/activate
py.test -m "network" -n 2
py.test eqcorrscan/doc/tutorials/*.rst eqcorrscan/doc/submodules/*.rst --cov-append
- run:
name: Upload to codecov
Expand Down
72 changes: 37 additions & 35 deletions eqcorrscan/doc/submodules/utils.correlate.rst
Original file line number Diff line number Diff line change
Expand Up @@ -89,36 +89,35 @@ for example:

.. code-block:: python
import obspy
from eqcorrscan.utils.correlate import numpy_normxcorr, set_xcorr
from eqcorrscan.core.match_filter import match_filter
>>> import obspy
>>> import numpy as np
>>> from eqcorrscan.utils.correlate import numpy_normxcorr, set_xcorr
>>> from eqcorrscan.core.match_filter import match_filter
# generate some toy templates and stream
random = np.random.RandomState(42)
template = obspy.read()
stream = obspy.read()
for num, tr in enumerate(stream): # iter stream and embed templates
data = tr.data
tr.data = random.randn(6000) * 5
tr.data[100: 100 + len(data)] = data
>>> # generate some toy templates and stream
>>> random = np.random.RandomState(42)
>>> template = obspy.read()
>>> stream = obspy.read()
>>> for num, tr in enumerate(stream): # iter stream and embed templates
... data = tr.data
... tr.data = random.randn(6000) * 5
... tr.data[100: 100 + len(data)] = data
>>> # do correlation using numpy rather than fftw
>>> detections = match_filter(['1'], [template], stream, .5, 'absolute',
... 1, False, xcorr_func='numpy')
# do correlation using numpy rather than fftw
match_filter(['1'], [template], stream, .5, 'absolute', 1, False,
xcorr_func='numpy')
>>> # do correlation using a custom function
>>> def custom_normxcorr(templates, stream, pads, *args, **kwargs):
... # Just to keep example short call other xcorr function
... print('calling custom xcorr function')
... return numpy_normxcorr(templates, stream, pads, *args, **kwargs)
# do correlation using a custom function
def custom_normxcorr(templates, stream, pads, *args, **kwargs):
# Just to keep example short call other xcorr function
print('calling custom xcorr function')
return numpy_normxcorr(templates, stream, pads, *args, **kwargs)
match_filter(['1'], [template], stream, .5, 'absolute', 1, False,
xcorr_func=custom_normxcorr)
# prints "calling custom xcorr function"
>>> detections = match_filter(
... ['1'], [template], stream, .5, 'absolute', 1, False,
... xcorr_func=custom_normxcorr) # doctest:+ELLIPSIS
calling custom xcorr function...
You can also use the set_xcorr object (eqcorrscan.utils.correlate.set_xcorr)
Expand All @@ -127,13 +126,16 @@ or within the scope of a context manager:

.. code-block:: python
# change the default xcorr function for all code in the with block
with set_xcorr(custom_normxcorr):
match_filter(['1'], [template], stream, .5, 'absolute', 1, False)
# prints "calling custom xcorr function"
# permanently change the xcorr function (until the python kernel restarts)
set_xcorr(custom_normxcorr)
match_filter(['1'], [template], stream, .5, 'absolute', 1, False)
# prints "calling custom xcorr function"
set_xcorr.revert() # change it back to the previous state
>>> # change the default xcorr function for all code in the with block
>>> with set_xcorr(custom_normxcorr):
... detections = match_filter(['1'], [template], stream, .5,
... 'absolute', 1, False) # doctest:+ELLIPSIS
calling custom xcorr function...
>>> # permanently set the xcorr function (until the python kernel restarts)
>>> set_xcorr(custom_normxcorr)
default changed to custom_normxcorr
>>> detections = match_filter(['1'], [template], stream, .5, 'absolute',
... 1, False) # doctest:+ELLIPSIS
calling custom xcorr function...
>>> set_xcorr.revert() # change it back to the previous state
1 change: 0 additions & 1 deletion eqcorrscan/doc/submodules/utils.findpeaks.rst
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,5 @@ findpeaks

coin_trig
find_peaks2_short
find_peaks_dep

.. comment to end block
152 changes: 58 additions & 94 deletions eqcorrscan/doc/tutorials/clustering.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,30 +18,26 @@ threshold to 1,000km

.. code-block:: python
from eqcorrscan.utils.clustering import space_cluster
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
client = Client("IRIS")
starttime = UTCDateTime("2002-01-01")
endtime = UTCDateTime("2002-02-01")
cat = client.get_events(starttime=starttime, endtime=endtime,
minmagnitude=6, catalog="ISC")
groups = space_cluster(catalog=cat, d_thresh=1000, show=False)
>>> from eqcorrscan.utils.clustering import space_cluster
>>> from obspy.clients.fdsn import Client
>>> from obspy import UTCDateTime
>>> client = Client("IRIS")
>>> starttime = UTCDateTime("2002-01-01")
>>> endtime = UTCDateTime("2002-02-01")
>>> cat = client.get_events(starttime=starttime, endtime=endtime,
... minmagnitude=6, catalog="ISC")
>>> groups = space_cluster(catalog=cat, d_thresh=1000, show=False)
Download a local catalog of earthquakes and cluster much finer (distance
threshold of 2km).

.. code-block:: python
from eqcorrscan.utils.clustering import space_cluster
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
client = Client("NCEDC")
starttime = UTCDateTime("2002-01-01")
endtime = UTCDateTime("2002-02-01")
cat = client.get_events(starttime=starttime, endtime=endtime,
minmagnitude=2)
groups = space_cluster(catalog=cat, d_thresh=2, show=False)
>>> client = Client("NCEDC")
>>> cat = client.get_events(starttime=starttime, endtime=endtime,
... minmagnitude=2)
>>> groups = space_cluster(catalog=cat, d_thresh=2, show=False)
Setting show to true will plot the dendrogram for grouping with individual
Expand All @@ -66,15 +62,11 @@ distance threshold and a one-day temporal limit.

.. code-block:: python
from eqcorrscan.utils.clustering import space_time_cluster
from obspy.clients.fdsn import Client
from obspy import UTCDateTime
client = Client("IRIS")
starttime = UTCDateTime("2002-01-01")
endtime = UTCDateTime("2002-02-01")
cat = client.get_events(starttime=starttime, endtime=endtime,
minmagnitude=6, catalog="ISC")
groups = space_time_cluster(catalog=cat, t_thresh=86400, d_thresh=1000)
>>> from eqcorrscan.utils.clustering import space_time_cluster
>>> client = Client("IRIS")
>>> cat = client.get_events(starttime=starttime, endtime=endtime,
... minmagnitude=6, catalog="ISC")
>>> groups = space_time_cluster(catalog=cat, t_thresh=86400, d_thresh=1000)
Cluster according to cross-correlation values
Expand All @@ -91,25 +83,32 @@ in the tests directory.

.. code-block:: python
from obspy import read
import glob
import os
from eqcorrscan.utils.clustering import cluster
# You will need to edit this line to the location of your eqcorrscan repo.
testing_path = 'eqcorrscan/tests/test_data/similar_events'
stream_files = glob.glob(os.path.join(testing_path, '*'))
stream_list = [(read(stream_file), i)
for i, stream_file in enumerate(stream_files)]
for stream in stream_list:
for tr in stream[0]:
if tr.stats.station not in ['WHAT2', 'WV04', 'GCSZ']:
stream[0].remove(tr)
continue
tr.detrend('simple')
tr.filter('bandpass', freqmin=5.0, freqmax=15.0)
tr.trim(tr.stats.starttime + 40, tr.stats.endtime - 45)
groups = cluster(template_list=stream_list, show=False,
corr_thresh=0.3)
>>> from obspy import read
>>> import glob
>>> import os
>>> from eqcorrscan.utils.clustering import cluster
>>> # You will need to edit this line to the location of your eqcorrscan repo.
>>> testing_path = 'eqcorrscan/tests/test_data/similar_events'
>>> stream_files = glob.glob(os.path.join(testing_path, '*'))
>>> stream_list = [(read(stream_file), i)
... for i, stream_file in enumerate(stream_files)]
>>> for stream in stream_list:
... for tr in stream[0]:
... if tr.stats.station not in ['WHAT2', 'WV04', 'GCSZ']:
... stream[0].remove(tr) # doctest:+ELLIPSIS
... continue
... tr = tr.detrend('simple')
... tr = tr.filter('bandpass', freqmin=5.0, freqmax=15.0)
... tr = tr.trim(tr.stats.starttime + 40, tr.stats.endtime - 45)
<obspy.core.stream.Stream object at ...>
>>> groups = cluster(template_list=stream_list, show=False,
... corr_thresh=0.3, cores=2)
Computing the distance matrix using 2 cores
Computing linkage
Clustering
Found 9 groups
Extracting and grouping
Stack waveforms (linear)
------------------------
Expand All @@ -122,30 +121,12 @@ The following examples use the test data in the eqcorrscan github repository.

.. code-block:: python
from obspy import read
import glob
import os
from eqcorrscan.utils.clustering import cluster
from eqcorrscan.utils.stacking import linstack
# You will need to edit this line to the location of your eqcorrscan repo.
testing_path = 'eqcorrscan/tests/test_data/similar_events'
stream_files = glob.glob(os.path.join(testing_path, '*'))
stream_list = [(read(stream_file), i)
for i, stream_file in enumerate(stream_files)]
for stream in stream_list:
for tr in stream[0]:
if tr.stats.station not in ['WHAT2', 'WV04', 'GCSZ']:
stream[0].remove(tr)
continue
tr.detrend('simple')
tr.filter('bandpass', freqmin=5.0, freqmax=15.0)
tr.trim(tr.stats.starttime + 40, tr.stats.endtime - 45)
groups = cluster(template_list=stream_list, show=False,
corr_thresh=0.3)
# groups[0] should contain 3 streams, which we can now stack
# Groups are returned as lists of tuples, of the stream and event index
group_streams = [st_tuple[0] for st_tuple in groups[0]]
stack = linstack(streams=group_streams)
>>> from eqcorrscan.utils.stacking import linstack
>>> # groups[0] should contain 3 streams, which we can now stack
>>> # Groups are returned as lists of tuples, of the stream and event index
>>> group_streams = [st_tuple[0] for st_tuple in groups[0]]
>>> stack = linstack(streams=group_streams)
Expand All @@ -162,27 +143,10 @@ of the instantaneous phase. In this manor coherent signals are amplified.

.. code-block:: python
from obspy import read
import glob
import os
from eqcorrscan.utils.clustering import cluster
from eqcorrscan.utils.stacking import PWS_stack
# You will need to edit this line to the location of your eqcorrscan repo.
testing_path = 'eqcorrscan/tests/test_data/similar_events'
stream_files = glob.glob(os.path.join(testing_path, '*'))
stream_list = [(read(stream_file), i)
for i, stream_file in enumerate(stream_files)]
for stream in stream_list:
for tr in stream[0]:
if tr.stats.station not in ['WHAT2', 'WV04', 'GCSZ']:
stream[0].remove(tr)
continue
tr.detrend('simple')
tr.filter('bandpass', freqmin=5.0, freqmax=15.0)
tr.trim(tr.stats.starttime + 40, tr.stats.endtime - 45)
groups = cluster(template_list=stream_list, show=False,
corr_thresh=0.3)
# groups[0] should contain 3 streams, which we can now stack
# Groups are returned as lists of tuples, of the stream and event index
group_streams = [st_tuple[0] for st_tuple in groups[0]]
stack = PWS_stack(streams=group_streams)
>>> from eqcorrscan.utils.stacking import PWS_stack
>>> # groups[0] should contain 3 streams, which we can now stack
>>> # Groups are returned as lists of tuples, of the stream and event index
>>> stack = PWS_stack(streams=group_streams)
Computing instantaneous phase
Computing the phase stack
84 changes: 43 additions & 41 deletions eqcorrscan/doc/tutorials/mag-calc.rst
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,19 @@ This example requires data downloaded from the eqcorrscan github repository.

.. code-block:: python
from eqcorrscan.utils.mag_calc import amp_pick_sfile
from obspy.core.event import Event
import os
testing_path = 'eqcorrscan/tests/test_data'
sfile = os.path.join(testing_path, 'REA', 'TEST_',
'01-0411-15L.S201309')
datapath = os.path.join(testing_path, 'WAV', 'TEST_')
respdir = testing_path
event = amp_pick_sfile(sfile=sfile, datapath=datapath,
respdir=respdir, chans=['Z'], var_wintype=True,
winlen=0.9, pre_pick=0.2, pre_filt=True,
lowcut=1.0, highcut=20.0, corners=4)
>>> from eqcorrscan.utils.mag_calc import amp_pick_sfile
>>> from obspy.core.event import Event
>>> import os
>>> testing_path = 'eqcorrscan/tests/test_data'
>>> sfile = os.path.join(testing_path, 'REA', 'TEST_',
... '01-0411-15L.S201309')
>>> datapath = os.path.join(testing_path, 'WAV', 'TEST_')
>>> respdir = testing_path
>>> event = amp_pick_sfile(sfile=sfile, datapath=datapath,
... respdir=respdir, chans=['Z'], var_wintype=True,
... winlen=0.9, pre_pick=0.2, pre_filt=True,
... lowcut=1.0, highcut=20.0, corners=4) # doctest:+ELLIPSIS
Working on ...
Relative moment by singular-value decomposition
-----------------------------------------------
Expand All @@ -41,32 +42,33 @@ This example requires data downloaded from the eqcorrscan github repository.

.. code-block:: python
from eqcorrscan.utils.mag_calc import SVD_moments
from obspy import read
import glob
import os
from eqcorrscan.utils.clustering import SVD
import numpy as np
# Do the set-up
testing_path = 'eqcorrscan/tests/test_data/similar_events'
stream_files = glob.glob(os.path.join(testing_path, '*'))
stream_list = [read(stream_file) for stream_file in stream_files]
event_list = []
for i, stream in enumerate(stream_list):
st_list = []
for tr in stream:
# Only use the vertical channels of sites with known high similarity.
# You do not need to use this step for your data.
if (tr.stats.station, tr.stats.channel) not in\
[('WHAT2', 'SH1'), ('WV04', 'SHZ'), ('GCSZ', 'EHZ')]:
stream.remove(tr)
continue
tr.detrend('simple')
tr.filter('bandpass', freqmin=5.0, freqmax=15.0)
tr.trim(tr.stats.starttime + 40, tr.stats.endtime - 45)
st_list.append(i)
event_list.append(st_list)
event_list = np.asarray(event_list).T.tolist()
SVectors, SValues, Uvectors, stachans = SVD(stream_list=stream_list)
M, events_out = SVD_moments(U=Uvectors, s=SValues, V=SVectors,
stachans=stachans, event_list=event_list)
>>> from eqcorrscan.utils.mag_calc import svd_moments
>>> from obspy import read
>>> import glob
>>> from eqcorrscan.utils.clustering import svd
>>> import numpy as np
>>> # Do the set-up
>>> testing_path = 'eqcorrscan/tests/test_data/similar_events'
>>> stream_files = glob.glob(os.path.join(testing_path, '*'))
>>> stream_list = [read(stream_file) for stream_file in stream_files]
>>> event_list = []
>>> for i, stream in enumerate(stream_list): # doctest:+ELLIPSIS
... st_list = []
... for tr in stream:
... # Only use the vertical channels of sites with known high similarity.
... # You do not need to use this step for your data.
... if (tr.stats.station, tr.stats.channel) not in\
... [('WHAT2', 'SH1'), ('WV04', 'SHZ'), ('GCSZ', 'EHZ')]:
... stream.remove(tr)
... continue
... tr.detrend('simple')
... tr.filter('bandpass', freqmin=5.0, freqmax=15.0)
... tr.trim(tr.stats.starttime + 40, tr.stats.endtime - 45)
... st_list.append(i)
... event_list.append(st_list)
<obspy.core.stream.Stream object at ...>
>>> event_list = np.asarray(event_list).T.tolist()
>>> SVectors, SValues, Uvectors, stachans = svd(stream_list=stream_list)
>>> M, events_out = svd_moments(u=Uvectors, s=SValues, v=SVectors,
... stachans=stachans, event_list=event_list) # doctest:+ELLIPSIS
Created Kernel matrix: ...
Loading

0 comments on commit 9c6bbb1

Please sign in to comment.