From 18059dec06fbdb62e8a3c6ff36d18de167699d05 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Thu, 31 Mar 2022 12:56:58 +0000 Subject: [PATCH 01/60] only one code for USSG and EMSC, update documentation, better defaults --- bin/import/usgs2db/.gitignore | 2 + bin/import/usgs2db/emsc2db.xpy | 53 ++++--- bin/import/usgs2db/usgs2db.1 | 26 ++-- bin/import/usgs2db/usgs2db.xpy | 273 +-------------------------------- 4 files changed, 47 insertions(+), 307 deletions(-) mode change 100755 => 120000 bin/import/usgs2db/usgs2db.xpy diff --git a/bin/import/usgs2db/.gitignore b/bin/import/usgs2db/.gitignore index 25fa1b08b..0627913f9 100644 --- a/bin/import/usgs2db/.gitignore +++ b/bin/import/usgs2db/.gitignore @@ -2,3 +2,5 @@ usgs2db emsc2db pdetxt2db keydb +fdsntxt2db +keydb.idmatch diff --git a/bin/import/usgs2db/emsc2db.xpy b/bin/import/usgs2db/emsc2db.xpy index e1bfde1fc..10919f171 100644 --- a/bin/import/usgs2db/emsc2db.xpy +++ b/bin/import/usgs2db/emsc2db.xpy @@ -1,15 +1,14 @@ """ @author Nikolaus Horn @created 2013-11-25 -@modified 2014-02-15 -@version 1.0 +@modified 2022-03-31 +@version 1.2 @license MIT-style license @credits ZAMG for my visit to EGU 2014 """ import getopt -import codecs import requests import json import warnings @@ -21,23 +20,28 @@ import antelope.elog as elog def usage(progname): - print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") + print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") def main(): progname = sys.argv[0].split("/")[-1] elog.init(progname) - BASE_URL = ( - "http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.geojson" - ) - BASE_URL = "http://www.seismicportal.eu/fdsnws/event/1/query?limit=100&format=json" + if progname == "usgs2db": + BASE_URL = ( + "http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_hour.geojson" + ) + auth = "USGS" + else: + BASE_URL = ( + "http://www.seismicportal.eu/fdsnws/event/1/query?limit=10&format=json" + ) + auth = "EMSC" verbose = 0 
archive = 0 opts = [] args = [] keydbname = "keydb" keyschema = "idmatch1.0" - auth = "EMSC" proxy_url = "" try: opts, args = getopt.getopt(sys.argv[1:], "a:k:p:u:v", "") @@ -139,6 +143,7 @@ def main(): i = len(data) for index in range(i): fdata = data[index] + unid = fdata["id"] geom_type = fdata["type"] geometry = fdata["geometry"] coordinates = geometry["coordinates"] @@ -154,9 +159,7 @@ def main(): cdi ) = ( place - ) = ( - code - ) = felt = mag = magtype = net = evtype = auth_str = unid = source_id = "" + ) = code = felt = mag = magtype = net = evtype = auth_str = source_id = "" ml = mb = ms = mlnull # be sure to convert unicode objects to string objects by calling "str(xxx)", # this prevents datascope from CRASHING @@ -186,10 +189,12 @@ def main(): net = str(propv) elif propk == "auth": auth_str = str(propv) - elif propk == "unid": - unid = str(propv) - elif propk == "source_id": - source_id = str(propv) + # elif propk == "unid": #emsc repeats the id in Features as it "unid" + # unid = str(propv) + # elif propk == "code": # usgs calls id code + # code = str(propv) + # elif propk == "source_id": + # source_id = str(propv) elif propk == "updated": updated = propv / 1000.0 elif propk == "lastupdate": @@ -200,21 +205,25 @@ def main(): place = str(propv) # push M to mb, seems to make sense... 
- if magtype.lower() == "m": + lmt = magtype.lower() + if lmt == "m": magtype = "mb" - - if magtype.lower() == "ml": + elif lmt == "ml": ml = mag - elif magtype.lower() == "mb": + elif lmt == "mb": mb = mag - elif magtype.lower() == "ms": + elif lmt == "ms": ms = mag - # grn, srn seems to be unimplemenmted + gr = stock.grnumber(lat, lon) sr = stock.srnumber(lat, lon) jdate = stock.epoch2str(etime, "%Y%j") # fkey = str("%s%s" % (net, code)) + # if net != "" and code != "": + # unid = "%s%s" % (net, code) + if verbose: + elog.notify("check id %s" % unid) kmatch = idmatch.lookup(table="idmatch", record="dbSCRATCH") try: diff --git a/bin/import/usgs2db/usgs2db.1 b/bin/import/usgs2db/usgs2db.1 index 1594aca60..da7a57e73 100644 --- a/bin/import/usgs2db/usgs2db.1 +++ b/bin/import/usgs2db/usgs2db.1 @@ -1,6 +1,6 @@ .TH USGS2DB 1 .SH NAME -usgs2db,emsc2db \- utility to retrieve earthquake feeds in GeoJSON format from \fIUSGS\fP or \fIEMSC\fP. +usgs2db, emsc2db \- utilities to retrieve earthquake feeds in GeoJSON format from \fIUSGS\fP or \fIEMSC\fP. .SH SYNOPSIS .nf \fBusgs2db\fP [-v] [-a \fIauthor\fP] @@ -17,24 +17,24 @@ The programs keeps track of event ids in a separate table and updates the output .SH OPTIONS .IP \-v verbose flag -.IP "-k keydname" -name of database to keep track of origin ids. Since USGS/EMSC IDs are a mixture of strings and integers, a separate table is needed. +.IP "-k keydbname" +name of database to keep track of event ids. Since USGS/EMSC IDs are a mixture of strings and integers, a separate table is needed. The database must be in schema \fBidmatch1.0\fP or higher. The database is created upon startup if not originally existing. The database name defaults to keydb. .IP \-u -URL of the data source. A list of all available feeds can be found on a page of the \fIUSGS\fP +URL of the data source. 
A list of all available feeds can be found on a website of the \fIUSGS\fP \fBhttp://earthquake.usgs.gov/earthquakes/feed/v1.0/geojson.php\fP If not specified, this defaults to -\fBhttp://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.geojson\fP -for \fIEMSC\fP, the program retrieves data from the FDSN webservice described on +\fBhttp://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_hour.geojson\fP. +For \fIEMSC\fP, the program retrieves data from the FDSN webservice described on \fBhttp://www.seismicportal.eu/fdsn-wsevent.html\fP the default url in this case is -\fBhttp://www.seismicportal.eu/fdsnws/event/1/query?limit=100&format=json\fP. +\fBhttp://www.seismicportal.eu/fdsnws/event/1/query?limit=10&format=json\fP. .IP "-a author" Author name for event, origin and netmag table. Defaults to NEIC or EMSC .IP database The name of the output database. This argument is required. .SH EXAMPLE -To retrieve all event data for the last month into database usgs: +To retrieve all event data for the last month into a database named \fBusgs\fP: .nf usgs2db -k db/usgs_ids -v -u http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson usgs .fi @@ -45,18 +45,18 @@ USGS UTC 0,10,20,30,40,50 * * * * usgs2db -k db/usgs_ids db/usgs .fi .SH ENVIRONMENT -The program uses the Python module \fIurllib2\fP to retrieve data from a webpage. -This module provides proxy support via environment variables like e.g.: http_proxy = http://myhost:myport. +The program uses the Python module \fIrequests\fP to retrieve data from a webpage. +This module supports proxy configuration based on the standard environment variables \fIhttp_proxy\fP, \fIhttps_proxy\fP. If you need more control over proxy settings, feel free to contact the author, Nikolaus.Horn@zamg.ac.at. .SH "SEE ALSO" .nf -pydoc liburl2, antelope_python(3y) +https://docs.python-requests.org/en/master, USGS2orb(1), antelope_python(3y) .fi .SH "BUGS AND CAVEATS" -The error handling is very basic. 
I implemented my personal understanding of the data offered. There might be wiser ways to use the save the information in a database. +The error handling is simple. I implemented my personal understanding of the data offered. There might be wiser ways to use the save the information in a database. The feeds are subject to the feed life cycle policy as explained on the website of the usgs. See there for more information if a feed stops working. -This module uses the python modules urllib2 and json with all their bugs. And of course I added many more problems... +This module uses the python modules requests with all their bugs and limitations. And of course I added many more problems... .SH AUTHOR Nikolaus Horn (nikolaus.horn@zamg.ac.at) diff --git a/bin/import/usgs2db/usgs2db.xpy b/bin/import/usgs2db/usgs2db.xpy deleted file mode 100755 index 29974422f..000000000 --- a/bin/import/usgs2db/usgs2db.xpy +++ /dev/null @@ -1,272 +0,0 @@ -""" -@author Nikolaus Horn -@created 2013-11-25 -@modified 2014-02-15 -@version 1.0 -@license MIT-style license -""" - - -import requests -import urllib.request, urllib.error, urllib.parse -import json -import pprint - -# Import Antelope modules - -import antelope.datascope as ds -import antelope.stock as stock -import getopt - - -def usage(): - print(sys.argv[0], "[-v] [-a auth] [-k keydb] [-u url] dbname") - -def main(): - # BASE_URL="http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.geojson" - BASE_URL = ( - "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_week.geojson" - ) - verbose = 0 - archive = 0 - opts = [] - args = [] - keydbname = "keydb" - keyschema = "idmatch1.0" - auth = "NEIC" - use_curl = False - try: - opts, args = getopt.getopt(sys.argv[1:], "a:ck:u:v", "") - except getopt.GetoptError: - print("illegal option") - usage() - sys.exit(2) - - for o, a in opts: - if o == "-v": - verbose = 1 - elif o == "-a": - auth = a - elif o == "-c": - use_curl = True - elif o == "-u": - BASE_URL = a - elif o 
== "-k": - keydbname = a - - if len(args) > 1 or len(args) < 1: - usage() - sys.exit(1) - - if len(args) > 0: - dbname = args[0] - - db = ds.dbopen(dbname, "r+") - dborigin = db.lookup(table="origin") - dbevent = db.lookup(table="event") - dbnetmag = db.lookup(table="netmag") - - dbq = db.lookup(table="origin", field="ml", record="dbNULL") - [mlnull] = dbq.getv("ml") - dbq = db.lookup(table="event", field="evname", record="dbNULL") - evname_width = dbq.query("dbFIELD_SIZE") - - kdb = ds.dbopen(keydbname, "r+") - descname = kdb.query("dbDATABASE_FILENAME") - if os.path.exists(descname): - schemaname = kdb.query("dbSCHEMA_NAME") - if schemaname != keyschema: - elog.die( - "keydb %s has wrong schema %s, should be %s" - % (keydbname, schemaname, keyschema) - ) - sys.exit(1) - else: - kdb.close() - ds.dbcreate(keydbname, keyschema) - - kdb = ds.dbopen(keydbname, "r+") - try: - idmatch = kdb.lookup(table="idmatch") - except Exception as e: - elog.die("Error :", e) - - req = request.get(BASE_URL, timeout=30) - gj_string = url.read() - obj = req.json() - data = obj["features"] - i = len(data) - for index in range(i): - fdata = data[index] - geom_type = fdata["type"] - geometry = fdata["geometry"] - coordinates = geometry["coordinates"] - lon = coordinates[0] - lat = coordinates[1] - depth = coordinates[2] - properties = fdata["properties"] - mb = ms = ml = mlnull - time = status = cdi = place = code = felt = mag = magtype = net = evtype = "" - ml = mb = ms = mlnull - # be sure to convert unicode objects to string objects by calling "str(xxx)", - # this prevents datascope from CRASHING - for propk, propv in properties.items(): - if propk == 'time': - etime=float(propv) / 1000. 
- elif propk == 'mag': - mag=float(propv) - elif propk == 'magType': - magtype=str(propv) - elif propk == 'place': - evname=str(propv) - elif propk == 'cdi': - if propv is not None: - cdi = float(propv) - inull = float(propv) - elif propk == "felt": - felt = propv - elif propk == "net": - net = str(propv) - elif propk == "code": - code = str(propv) - elif propk == "updated": - updated = propv / 1000.0 - elif propk == "place": - place = str(propv) - - if magtype.lower() == "ml": - ml = mag - elif magtype.lower() == "mb": - mb = mag - elif magtype.lower() == "ms": - ms = mag - # grn, srn seems to be unimplemenmted - gr = stock.grnumber(lat, lon) - sr = stock.srnumber(lat, lon) - jdate = stock.epoch2str(etime, "%Y%j") - - fkey = str("%s%s" % (net, code)) - - kmatch = idmatch.lookup(table="idmatch", record="dbSCRATCH") - try: - kmatch.putv(('fkey', fkey)) - except Exception as e: - print("Error :",e) - - matcher=kmatch.matches(idmatch,'fkey') - rec_list=matcher() - new_event=False - evid=0 - updated_event=False - if len(rec_list) > 1: - print("found too many keys, sth strange goes on here") - if len(rec_list) > 0: - for rec in rec_list: - idmatch.record = rec - [ftime, kname, kval] = idmatch.getv("ftime", "keyname", "keyvalue") - # print "found key %s %s" % (kname, kval) - if kname == "evid": - evid = kval - if updated > ftime: - new_event = False - updated_event = True - else: - updated_event = False - - else: - new_event = True - - if new_event: - if verbose: - print("new event %s" % code) - evid = dborigin.nextid("evid") - orid = dborigin.nextid("orid") - orecno = dborigin.addv( - ("time", etime), - ("lat", lat), - ("lon", lon), - ("depth", depth), - ("evid", evid), - ("orid", orid), - ("jdate", jdate), - ("mb", mb), - ("ml", ml), - ("ms", ms), - ("nass", 0), - ("ndef", 0), - ("auth", auth), - ("grn", gr), - ("srn", sr), - ) - erecno = dbevent.addv( - ("evid", evid), - ("prefor", orid), - ("evname", evname[:evname_width]), - ("auth", auth), - ) - nmrecno = 
dbnetmag.addv( - ("evid", evid), - ("orid", orid), - ("magnitude", mag), - ("magtype", magtype), - ("auth", auth), - ) - idmatch.addv( - ("fkey", fkey), - ("keyname", "evid"), - ("keyvalue", evid), - ("ftime", updated), - ) - elif updated_event: - if verbose: - print("updated event %s" % code) - idmatch.putv(("ftime", updated)) - kmatch = db.lookup(table="event", record="dbSCRATCH") - kmatch.putv(("evid", evid)) - evmatcher = kmatch.matches(dbevent, "evid") - evlist = evmatcher() - if len(evlist) > 1: - print("strange, found a few matching events for evid %d " % evid) - if len(evlist) > 0: - dbevent.record = evlist[0] - [prefor] = dbevent.getv("prefor") - - kmatch = db.lookup(table="origin", record="dbSCRATCH") - kmatch.putv(("orid", prefor)) - ormatcher = kmatch.matches(dborigin, "orid") - orlist = ormatcher() - if len(orlist) > 1: - print("strange, found a few origind for orid %d" % prefor) - if len(orlist) > 0: - dborigin.record = orlist[0] - dborigin.putv( - ("time", etime), - ("lat", lat), - ("lon", lon), - ("depth", depth), - ("jdate", jdate), - ) - if magtype.lower() == "ml": - dborigin.putv(("ml", mag)) - elif magtype.lower() == "mb": - dborigin.putv(("mb", mag)) - elif magtype.lower() == "ms": - dborigin.putv(("ms", mag)) - kmatch = db.lookup(table="netmag", record="dbSCRATCH") - kmatch.putv(("orid", prefor)) - magmatcher = kmatch.matches(dbnetmag, "orid") - maglist = magmatcher() - if len(maglist) > 1: - print("strange, found a few netmags for origin %d" % prefor) - if len(maglist) > 0: - dbnetmag.record = maglist[0] - dbnetmag.putv( - ("magnitude", mag), ("magtype", magtype), ("auth", auth) - ) - - return 0 - - -if __name__ == "__main__": - status = main() - sys.exit(status) diff --git a/bin/import/usgs2db/usgs2db.xpy b/bin/import/usgs2db/usgs2db.xpy new file mode 120000 index 000000000..962bf17fd --- /dev/null +++ b/bin/import/usgs2db/usgs2db.xpy @@ -0,0 +1 @@ +emsc2db.xpy \ No newline at end of file From 775cb0c563b53321291f3960b91154f160afc71b Mon Sep 
17 00:00:00 2001 From: Niko Horn Date: Thu, 31 Mar 2022 16:34:15 +0200 Subject: [PATCH 02/60] fixed behaviour with multiple binaries --- bin/utility/mkmf/mkmf.csh | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bin/utility/mkmf/mkmf.csh b/bin/utility/mkmf/mkmf.csh index 37a722823..cfd8e2ec4 100644 --- a/bin/utility/mkmf/mkmf.csh +++ b/bin/utility/mkmf/mkmf.csh @@ -1,12 +1,13 @@ -set nonomatch +set nonomatch # no error if nothing matches at all set mybins="" set patterns="*.c *.cpp *.F *.f *.sh *.csh *.tcl *.xpl *.xppl *.xpls *.xtcl *.xwish *.xvwish *.xwish8 *.xpy *.xbqpy" foreach pat ($patterns) set ft=( $pat ) - if (-e $ft[1] ) then - set mybins=`printf "%s %s" $mybins $ft:gr` + if ( -e $ft[1] ) then + #set mybins=`printf "%s %s" $mybins $ft[1]:gr` + set mybins = ($mybins $ft:gr) endif end if ( "${mybins}--" != "--"} ) then @@ -24,7 +25,7 @@ set patterns="*.h *.i" foreach pat ($patterns) set ft=( $pat ) if (-e $ft[1] ) then - set myincludes=`printf "%s %s" $myincludes $ft` + set myincludes = ($myincludes $ft) endif end if ( "${myincludes}--" != "--" ) then From e697952456fe48602c1e68694d432751c9dead17 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Fri, 1 Apr 2022 08:12:56 +0200 Subject: [PATCH 03/60] remove product from repo --- data/python/zamg_utilities/next_number.3y | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 data/python/zamg_utilities/next_number.3y diff --git a/data/python/zamg_utilities/next_number.3y b/data/python/zamg_utilities/next_number.3y deleted file mode 100644 index a122ef9e0..000000000 --- a/data/python/zamg_utilities/next_number.3y +++ /dev/null @@ -1,2 +0,0 @@ -.so man3y/zamg_utilities.3y - From 39a90c95602b677e2fc7880c0aed2892af102ef2 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Fri, 1 Apr 2022 08:14:29 +0200 Subject: [PATCH 04/60] fix typo in manpage --- data/python/polygon/polygon_utils.3y | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/data/python/polygon/polygon_utils.3y 
b/data/python/polygon/polygon_utils.3y index 8c0852552..f85f6c5a4 100644 --- a/data/python/polygon/polygon_utils.3y +++ b/data/python/polygon/polygon_utils.3y @@ -3,18 +3,18 @@ polygon_utils \- little helper for polygon data .SH SYNOPSIS .nf -import matplotlib as mpl +from matplotlib import pyplot as plt import numpy as np import antelope.datascope as ds sys.path.append(os.environ['ANTELOPE'] + "/contrib/data/python") import zamg.polygon as zp -import \fBzamg.polygon_utilities\fP as \fBzpu\fP +import \fBzamg.polygon_utils\fP as \fBzpu\fP db=ds.dbopen("/opt/antelope/local/data/database/plz_austria_2021","r") db = db.lookup(table="polygon") dbs = db.subset("pname=~/1010/") -pdata = zp.readpolygon(db) # returns a list of lsits, each inner list is a sequence of (lon, lat)-tuples +pdata = zp.readpolygon(db) # returns a list of lists, each inner list is a sequence of (lon, lat)-tuples ndata = np.array(pdata[0]) lon, lat = ndata.T # transpose array sp = zpu.simplify(pdata[0], 0.001) @@ -36,7 +36,7 @@ return simplified version of polygon antelope_python(3y), pythonpolygon(3y), polygon(3) .fi .SH "BUGS AND CAVEATS" -This surrently implements a mthod for simplifying probably adds a few bugs to the polygon library. +This currently implements a method for simplifying, which probably adds a few bugs to the polygon library. 
Once I understand how the \fIbuplot\fP stuff works, I will add more functionality .SH AUTHOR From 23c21c64ee8f041e8f1f6b51f33dad3a59639422 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Fri, 1 Apr 2022 08:15:59 +0200 Subject: [PATCH 05/60] remove hacks, not needed with current antelopemake --- lib/libpolygon/Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/libpolygon/Makefile b/lib/libpolygon/Makefile index d49134669..43c3d7a58 100644 --- a/lib/libpolygon/Makefile +++ b/lib/libpolygon/Makefile @@ -18,6 +18,9 @@ MAN3= \ ldlibs= $(DBLIBS) cflags = -g +#-dynamiclib +#ldflags = -target x86_64-apple-macos12.0 -dynamiclib + SUBDIR=/contrib include $(ANTELOPEMAKE) From 9f3e890984cec7a787473712701e3c13d155afe4 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Fri, 1 Apr 2022 08:20:46 +0200 Subject: [PATCH 06/60] try to sort schemas --- bin/utility/schemastuff/sortschema.csh | 29 ++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100755 bin/utility/schemastuff/sortschema.csh diff --git a/bin/utility/schemastuff/sortschema.csh b/bin/utility/schemastuff/sortschema.csh new file mode 100755 index 000000000..04c601b84 --- /dev/null +++ b/bin/utility/schemastuff/sortschema.csh @@ -0,0 +1,29 @@ +if ( "$#" != 2 ) then + echo "usage $0 schemafile newschema" + echo " " + echo "little helper to sort a schema" + exit 1 +endif +set schemafile=$1 +set newfile=$2 +if ( -f $ANTELOPE/contrib/data/awk/splitschema.awk ) then + set SPLITS=$ANTELOPE/contrib/data/awk/splitschema.awk +else if (-f $ANTELOPE/data/awk/splitschema.awk ) then + set SPLITS=$ANTELOPE/data/awk/splitschema.awk +else + echo "helper splitschema.awk not found!" 
+ exit 1 +endif +rm -f $newfile +egrep "^Attribute" $schemafile | awk '{print $2}' > Attributes +egrep "^Relation" $schemafile|awk '{print $2}' > Relations +foreach att (`cat Attributes|sort -u`) + awk -f $SPLITS -v var=$att -v type=a $schemafile >> $newfile + echo >> $newfile +end +foreach rel (`cat Relations|sort -u`) + awk -f $SPLITS -v var=$rel -v type=r $schemafile >> $newfile +end + +rm -f Attributes +rm -f Relations From 01bce18f25e732e8fca361205738c80a52a653d1 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Fri, 1 Apr 2022 08:21:30 +0200 Subject: [PATCH 07/60] catch more possible sources --- bin/utility/mkmf/mkmf.csh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/utility/mkmf/mkmf.csh b/bin/utility/mkmf/mkmf.csh index cfd8e2ec4..3b3feedfa 100644 --- a/bin/utility/mkmf/mkmf.csh +++ b/bin/utility/mkmf/mkmf.csh @@ -2,7 +2,7 @@ set nonomatch # no error if nothing matches at all set mybins="" -set patterns="*.c *.cpp *.F *.f *.sh *.csh *.tcl *.xpl *.xppl *.xpls *.xtcl *.xwish *.xvwish *.xwish8 *.xpy *.xbqpy" +set patterns="*.c *.cpp *.F *.f *.sh *.csh *.tcl *.xpl *.xppl *.xpls *.xtcl *.xwish *.xvwish *.xwish8 *.oxwish *.xpy *.xbqpy *.xbqpyn *.xbqpyx *xpys" foreach pat ($patterns) set ft=( $pat ) if ( -e $ft[1] ) then From a1dbee642d8fae25ae16e11571486a5d3fe8f5d6 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 08:09:45 +0000 Subject: [PATCH 08/60] silly bug when cutting short strings --- data/python/zamg_utilities/utilities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/python/zamg_utilities/utilities.py b/data/python/zamg_utilities/utilities.py index 2c1001c48..2b4fff91b 100755 --- a/data/python/zamg_utilities/utilities.py +++ b/data/python/zamg_utilities/utilities.py @@ -86,7 +86,7 @@ def string_charsplit(my_string, bytelen): def string_maxbytes(my_string, bytelen): """chop encoded string into characters, not bytes""" chars = 1 - while utf8len(my_string[:chars]) <= bytelen: + while 
utf8len(my_string[:chars]) <= bytelen and chars <= len(my_string): chars += 1 return my_string[: chars - 1] From ab065ec5d1efe6452f9767360720b82f4d3cd1f4 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 08:22:43 +0000 Subject: [PATCH 09/60] no more chopped characters, useful online-help --- bin/import/usgs2db/emsc2db.xpy | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/bin/import/usgs2db/emsc2db.xpy b/bin/import/usgs2db/emsc2db.xpy index 10919f171..928b488a5 100644 --- a/bin/import/usgs2db/emsc2db.xpy +++ b/bin/import/usgs2db/emsc2db.xpy @@ -18,6 +18,8 @@ import antelope.datascope as ds import antelope.stock as stock import antelope.elog as elog +import zamg.utilities as zu + def usage(progname): print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") @@ -31,11 +33,13 @@ def main(): "http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_hour.geojson" ) auth = "USGS" + help_text = "\nUSGS provides at most 1 month of data on the following URL:\nhttp://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson\ndefault is to retrieve only the most recent events" else: BASE_URL = ( "http://www.seismicportal.eu/fdsnws/event/1/query?limit=10&format=json" ) auth = "EMSC" + help_text = "\nEMSC provides at most 1000 events at once on the following URL:\nhttp://www.seismicportal.eu/fdsnws/event/1/query?limit=1000&format=json\ndefault is to retrieve only the most recent events" verbose = 0 archive = 0 opts = [] @@ -44,7 +48,7 @@ def main(): keyschema = "idmatch1.0" proxy_url = "" try: - opts, args = getopt.getopt(sys.argv[1:], "a:k:p:u:v", "") + opts, args = getopt.getopt(sys.argv[1:], "a:hk:p:u:v", "") except getopt.GetoptError: elog.die("illegal option") usage(progname) @@ -61,6 +65,10 @@ def main(): keydbname = a elif o == "-p": proxy_url = a + elif o == "-h": + usage(progname) + elog.notify(help_text) + sys.exit(0) if len(args) > 1 or len(args) < 1: usage(progname) @@ -290,7 +298,7 @@ def 
main(): erecno = dbevent.addv( ("evid", evid), ("prefor", orid), - ("evname", evname[:evname_width]), + ("evname", zu.string_maxbytes(evname, evname_width)), ("auth", auth), ) except Exception as __: From ae3b9eb0cb3f1f85a6fa1904b73bf1436dcaf4e5 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 09:59:44 +0000 Subject: [PATCH 10/60] fix different understandings of depth --- bin/import/usgs2db/emsc2db.xpy | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bin/import/usgs2db/emsc2db.xpy b/bin/import/usgs2db/emsc2db.xpy index 928b488a5..1048a984d 100644 --- a/bin/import/usgs2db/emsc2db.xpy +++ b/bin/import/usgs2db/emsc2db.xpy @@ -158,7 +158,9 @@ def main(): lon = float(coordinates[0]) lat = float(coordinates[1]) depth = float(coordinates[2]) - depth *= -1.0 + #EMSC correctly specifies depth as a negative number :-) + if progname == "emsc2db": + depth *= -1.0 properties = fdata["properties"] mb = ms = ml = mlnull time = ( @@ -181,6 +183,8 @@ def main(): etime = stock.str2epoch(dt2) elif propk == "mag": mag = float(propv) + elif propk == "depth": + depth = float(propv) elif propk.lower() == "magtype": magtype = str(propv) elif propk == "place": From e9e58b9018f381c74023ffb8dab082804b927433 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 13:49:52 +0000 Subject: [PATCH 11/60] remove redundant script to parse pde, better use fdsntxt2bd or so --- bin/import/usgs2db/pdetxt2db.xpy | 122 ------------------------------- 1 file changed, 122 deletions(-) delete mode 100755 bin/import/usgs2db/pdetxt2db.xpy diff --git a/bin/import/usgs2db/pdetxt2db.xpy b/bin/import/usgs2db/pdetxt2db.xpy deleted file mode 100755 index fdcc6f00c..000000000 --- a/bin/import/usgs2db/pdetxt2db.xpy +++ /dev/null @@ -1,122 +0,0 @@ -''' -@author Nikolaus Horn -@created 2014-06-18 -@modified 2014-06-18 -@version 1.0 -@license MIT-style license -''' - - -# Import Antelope modules - -import antelope.datascope as ds -import antelope.stock as stock -import 
getopt -import codecs -import urllib2 -import json -import pprint - -def usage(): - print sys.argv[0], "[-v] [-a auth] [-u url] dbname from to" - -def main(): - BASE_URL="http://service.iris.edu/fdsnws/event/1/query?orderby=time&format=text&nodata=404" - verbose=0 - archive=0 - opts = [] - args = [] - auth='' - try: - opts, args = getopt.getopt(sys.argv[1:], 'a:u:v', '') - except getopt.GetoptError: - print "illegal option" - usage() - sys.exit(2) - - for o,a in opts: - if o == '-v': - verbose = 1 - elif o == '-a': - auth = a - elif o == '-u': - BASE_URL = a - - if len(args) > 3 or len(args) < 3: - usage() - sys.exit(1) - - if len(args) == 3: - dbname=args[0] - t_from=str(args[1]) - t_to=str(args[2]) - - t1 = stock.str2epoch(t_from) - t2 = stock.str2epoch(t_to) - if t2 < t1: - t2=t1+t2 - ts1=stock.epoch2str(t1,'%Y-%m-%dT%H:%M:%S') - ts2=stock.epoch2str(t2,'%Y-%m-%dT%H:%M:%S') - if t1 >= t2: - print "endtime MUST be AFTER starttime : %s >= %s" % (ts1,ts2) - - db= ds.dbopen( dbname, "r+" ) - dborigin=db.lookup(table='origin') - dbevent=db.lookup(table='event') - dbnetmag=db.lookup(table='netmag') - - dbq=db.lookup(table='origin',field='ml',record='dbNULL') - [mlnull]=dbq.getv('ml') - dbq=db.lookup(table='event',field='evname',record='dbNULL') - evname_width=dbq.query('dbFIELD_SIZE') - - #proxies={'http':'http://138.22.156.44:3128'} - MY_URL="%s&starttime=%s&endtime=%s" % (BASE_URL,ts1,ts2) - url=urllib2.urlopen(MY_URL) - data_string=url.read() - for myline in data_string.splitlines(): - - if myline[0]=='#': - continue - - [evid,otimestr,lat,lon,depth,oauth,ocat,ocont,ocid,magtype,magnitude,magauth,regname]=myline.split('|') - evid=int(evid) - lat=float(lat) - lon=float(lon) - depth=float(depth) - if magnitude != '': - magnitude=float(magnitude) - else: - magnitude=mlnull - ts=otimestr.replace('T',' ') - otime=stock.str2epoch(ts) - magtype=str(magtype).lower() - if verbose: - print "evid %d lat %f lon %f :%s: %.1f" % (evid,lat,lon,magtype,magnitude) - - 
mb=ms=ml=mlnull - if magtype == 'ml': - ml=magnitude - if magtype == 'ms': - ms=magnitude - if magtype == 'mb': - mb=magnitude - gr=stock.grnumber(lat,lon) - sr=stock.srnumber(lat,lon) - jdate=stock.epoch2str(otime,'%Y%j') - if auth != '': - oauth=auth - ocat= auth - orecno=dborigin.addv( ('time',otime),('lat',lat),('lon',lon),('depth',depth), - ('evid',evid),('orid',evid), ('jdate',jdate), - ('mb',mb),('ml',ml),('ms',ms), - ('nass',0),('ndef',0),('auth',ocat),('grn',gr),('srn',sr) ) - - erecno=dbevent.addv(('evid',evid),('prefor',evid),('evname',regname[:evname_width]),('auth',ocat) ) - nmrecno=dbnetmag.addv(('evid',evid),('orid',evid),('magnitude',magnitude),('magtype',magtype),('auth',magauth) ) - - return 0 - -if __name__ == '__main__': - status = main() - sys.exit(status) From 8214bd260dd555e6a0d733f1e749e38e30b3137c Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 13:50:32 +0000 Subject: [PATCH 12/60] read GFZ RSS --- bin/import/usgs2db/gfzrss2db.1 | 66 ++++++ bin/import/usgs2db/gfzrss2db.xpy | 354 +++++++++++++++++++++++++++++++ 2 files changed, 420 insertions(+) create mode 100644 bin/import/usgs2db/gfzrss2db.1 create mode 100755 bin/import/usgs2db/gfzrss2db.xpy diff --git a/bin/import/usgs2db/gfzrss2db.1 b/bin/import/usgs2db/gfzrss2db.1 new file mode 100644 index 000000000..50cab0442 --- /dev/null +++ b/bin/import/usgs2db/gfzrss2db.1 @@ -0,0 +1,66 @@ +.TH GFZRSS2DB 1 +.SH NAME +gfzrss2db \- utility to retrieve earthquake information in text format from \fBFDSN\fP services. +.SH SYNOPSIS +.nf +\fBgfzrss2db\fP [-v] [-a \fIauthor\fP] + [-k \fIkeydbname\fP] [-u \fIservice-url\fP] \fIdbname\fP +.fi +.SH DESCRIPTION +\fBgfzrss2db\fP queries the RSS-Feed of GPZ +and stores the event information in a database. +The programs keeps track of event ids in a separate table and updates +the output database if new information is available. +.br +.SH OPTIONS +.IP \-v +verbose flag +.IP -h +Show help. 
+.IP "-k keydbname" +name of database to keep track of event ids. Since IDs used by GFZ +are a mixture of strings and integers, a separate table is needed. +The database must be in schema \fBidmatch1.0\fP or higher. The database +is created upon startup if not originally existing. +The database name defaults to keydb. +.IP \-u +URL of the data source. The RSS-Feed should have exactly the same format as +provided by GFZ on http://geofon.gfz-potsdam.de/eqinfo/list.php?fmt=rss. +Probably it makes no sense not to use the default given above. +.IP "-a author" +Author name for event, origin and netmag table. The event flag (A for automatic) is appended. Defaults to GFZ. +.IP database +The name of the output database. This argument is required. +.SH EXAMPLE +To retrieve recent event data from GFZ into a database named \fBgfz\fP: +.nf +gfzrss2db -k db/gfz_ids -v gfz +.fi +To update event information every 10 minutes, add the following to your rtexec.pf +.nf +# update database gfz every 10 minutes +GFZ UTC 0,10,20,30,40,50 * * * * gfzrss2db -k db/gfz_ids db/gfz +.fi +.SH ENVIRONMENT +The program uses the Python module \fIrequests\fP to retrieve data from a +webservice. This module supports proxy configuration based on the standard +environment variables \fIhttp_proxy\fP, \fIhttps_proxy\fP. +If you need more control over proxy settings, feel free to +contact the author, Nikolaus.Horn@zamg.ac.at. +.SH "SEE ALSO" +.nf + +https://docs.python-requests.org/en/master, USGS2orb(1), antelope_python(3y) + +.fi +.SH "BUGS AND CAVEATS" +Since the text format for event information does not provide information on +when the data has been updated, information on existing events is always +overwritten. This is inefficient, but unavoidable. The error handling is simple. +Since no magnitude type is given, it is set to \fImb\fP. +I implemented my personal understanding of the data offered. There might
+This module uses the python modules requests with all their bugs and +limitations. And of course I added many more problems... +.SH AUTHOR +Nikolaus Horn (nikolaus.horn@zamg.ac.at) diff --git a/bin/import/usgs2db/gfzrss2db.xpy b/bin/import/usgs2db/gfzrss2db.xpy new file mode 100755 index 000000000..5e51a1166 --- /dev/null +++ b/bin/import/usgs2db/gfzrss2db.xpy @@ -0,0 +1,354 @@ +""" +@author Nikolaus Horn +@created 2013-11-25 +@modified 2022-03-31 +@version 1.2 +@license MIT-style license +@credits ZAMG for my visit to EGU 2014 +""" + + +import getopt +import requests +import warnings + +#XML related stuff +import xml.dom.minidom +import pprint +import datetime + +# Import Antelope modules +import antelope.datascope as ds +import antelope.stock as stock +import antelope.elog as elog + +import zamg.utilities as zu + +def getText(nodelist): + rc = [] + for node in nodelist: + if node.nodeType == node.TEXT_NODE: + rc.append(node.data) + return "".join(rc) + +def usage(progname): + print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") + + +def main(): + progname = sys.argv[0].split("/")[-1] + elog.init(progname) + BASE_URL = "http://geofon.gfz-potsdam.de/eqinfo/list.php?fmt=rss" + auth_base = "GFZ" + help_text = """Not all datacenters provide event information using FDSN webservices +Unfortunately, RSS or GeoRSS is not fully standardized. 
I assume this progam is only usefol for obtaining event information from GFZ""" + verbose = 0 + archive = 0 + opts = [] + args = [] + keydbname = "keydb" + keyschema = "idmatch1.0" + proxy_url = "" + try: + opts, args = getopt.getopt(sys.argv[1:], "a:hk:p:u:v", "") + except getopt.GetoptError: + elog.die("illegal option") + usage(progname) + sys.exit(2) + + for o, a in opts: + if o == "-v": + verbose = 1 + elif o == "-a": + auth_base = a + elif o == "-u": + BASE_URL = a + elif o == "-k": + keydbname = a + elif o == "-p": + proxy_url = a + elif o == "-h": + usage(progname) + elog.notify(help_text) + sys.exit(0) + + if len(args) > 1 or len(args) < 1: + usage(progname) + sys.exit(1) + + if len(args) > 0: + dbname = args[0] + + db = ds.dbopen(dbname, "r+") + dborigin = db.lookup(table="origin") + dbevent = db.lookup(table="event") + dbnetmag = db.lookup(table="netmag") + + dbq = db.lookup(table="origin", field="ml", record="dbNULL") + [mlnull] = dbq.getv("ml") + dbq = db.lookup(table="event", field="evname", record="dbNULL") + evname_width = dbq.query("dbFIELD_SIZE") + dbq = db.lookup(table="event", field="auth", record="dbNULL") + auth_width = dbq.query("dbFIELD_SIZE") + + + kdb = ds.dbopen(keydbname, "r+") + descname = kdb.query("dbDATABASE_FILENAME") + if os.path.exists(descname): + schemaname = kdb.query("dbSCHEMA_NAME") + if schemaname != keyschema: + elog.die( + "keydb %s has wrong schema %s, should be %s" + % (keydbname, schemaname, keyschema) + ) + sys.exit(1) + else: + kdb.close() + ds.dbcreate(keydbname, keyschema) + + kdb = ds.dbopen(keydbname, "r+") + try: + idmatch = kdb.lookup(table="idmatch") + except Exception as e: + elog.die("fatal problem with key database:", e) + + if proxy_url != "": + if proxy_url.startswith("https"): + proxy = {"https": proxy_url} + else: + proxy = {"http": proxy_url} + with warnings.catch_warnings(): + warnings.simplefilter( + "ignore" + ) # ignore silly warnings on SSL verification, especially needed on 5.9 + try: + req = 
requests.get(BASE_URL, proxies=proxy, verify=False, timeout=30) + req.raise_for_status() + except requests.exceptions.HTTPError as herr: + elog.die("problem requesting data from %s:%s" % (BASE_URL, herr)) + except requests.exceptions.Timeout: + elog.die("timeout requesting data from %s" % BASE_URL) + except requests.exceptions.TooManyRedirects: + elog.die("too many retries requesting data from %s" % BASE_URL) + except requests.exceptions.RequestException as e: + elog.die("fatal problem requesting data from %s" % BASE_URL) + except: + elog.die("unspecific problem requesting data from %s" % BASE_URL) + else: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + req = requests.get(BASE_URL, verify=False, timeout=30) + req.raise_for_status() + except requests.exceptions.HTTPError as herr: + elog.die("problem requesting data from %s:%s" % (BASE_URL, herr)) + except requests.exceptions.Timeout: + elog.die("timeout requesting data from %s" % BASE_URL) + except requests.exceptions.TooManyRedirects: + elog.die("too many retries requesting data from %s" % BASE_URL) + except requests.exceptions.RequestException as e: + elog.die("fatal problem requesting data from %s" % BASE_URL) + except: + elog.die("unspecific problem requesting data from %s" % BASE_URL) + req.encoding = "utf8" # maybe not necessary... 
+ updated = stock.now() + + dom = xml.dom.minidom.parseString(req.content) + events = dom.getElementsByTagName("item") + for event in events: + title = event.getElementsByTagName("title")[0] + ts = getText(title.childNodes) + mag_s, evname = ts.split(",", 1) + mag = float(mag_s[2:]) + description = event.getElementsByTagName("description")[0] + desc = getText(description.childNodes) + date_s, time_s, lat_s, lon_s, depth_s, depth_u, flag = desc.split() + unid = event.getElementsByTagName("guid")[0].firstChild.nodeValue + etime = stock.str2epoch("%s %s" % (date_s, time_s)) + jdate = stock.epoch2str(etime, "%Y%j") + lat = float(lat_s) + lon = float(lon_s) + depth = float(depth_s) + auth = "%s:%s" % (auth_base, flag) + + mb = ms = ml = mlnull + time = status = cdi = place = code = felt = magtype = net = evtype = "" + # be sure to convert unicode objects to string objects by calling "str(xxx)", + # this prevents datascope from CRASHING + + # use magtype mb for ignorance + magtype = "mb" + + gr = stock.grnumber(lat, lon) + sr = stock.srnumber(lat, lon) + jdate = stock.epoch2str(etime, "%Y%j") + + if verbose: + elog.notify("check id %s" % unid) + + kmatch = idmatch.lookup(table="idmatch", record="dbSCRATCH") + try: + kmatch.putv(("fkey", unid)) + except Exception as e: + elog.die("problem writing key %s to matcher :", (unid, e)) + + matcher = kmatch.matches(idmatch, "fkey") + rec_list = matcher() + new_event = False + evid = 0 + updated_event = False + if len(rec_list) > 1: + elog.notify( + "found too many keys for %s, sth strange goes on here" % unid + ) + if len(rec_list) > 0: + for rec in rec_list: + idmatch.record = rec + [ftime, kname, kval] = idmatch.getv("ftime", "keyname", "keyvalue") + # print "found key %s %s" % (kname, kval) + if kname == "evid": + evid = kval + if updated > ftime: + new_event = False + updated_event = True + else: + updated_event = False + + else: + new_event = True + + if new_event: + problem = False + if verbose: + elog.notify("new event %s" 
% unid) + evid = dborigin.nextid("evid") + orid = dborigin.nextid("orid") + try: + orecno = dborigin.addv( + ("time", etime), + ("lat", lat), + ("lon", lon), + ("depth", depth), + ("evid", evid), + ("orid", orid), + ("jdate", jdate), + ("mb", mb), + ("ml", ml), + ("ms", ms), + ("nass", 0), + ("ndef", 0), + ("auth", auth), + ("grn", gr), + ("srn", sr), + ) + except Exception as __: + problem = True + if verbose: + elog.notify( + "problem adding origin for event at %s" + % stock.strtime(etime) + ) + + if not problem: + try: + erecno = dbevent.addv( + ("evid", evid), + ("prefor", orid), + ("evname", zu.string_maxbytes(evname, evname_width)), + ("auth", auth) , + ) + except Exception as __: + if verbose: + problem = True + elog.notify( + "problem adding event for events at %s" + % stock.strtime(etime) + ) + if not problem: + try: + nmrecno = dbnetmag.addv( + ("evid", evid), + ("orid", orid), + ("magnitude", mag), + ("magtype", magtype), + ("auth", auth), + ) + except Exception as __: + if verbose: + problem = True + elog.notify( + "problem adding netmap for event at %s" + % stock.strtime(etime) + ) + if not problem: + try: + idmatch.addv( + ("fkey", unid), + ("keyname", "evid"), + ("keyvalue", evid), + ("ftime", updated), + ) + except Exception as __: + if verbose: + problem = True + elog.notify( + "problem adding id for event at %s" + % stock.strtime(etime) + ) + elif updated_event: + if verbose: + elog.notify("eventually updated event %s" % unid) + idmatch.putv(("ftime", updated)) + kmatch = db.lookup(table="event", record="dbSCRATCH") + kmatch.putv(("evid", evid)) + evmatcher = kmatch.matches(dbevent, "evid") + evlist = evmatcher() + if len(evlist) > 1: + elog.notify( + "strange, found a few matching events for evid %d " % evid + ) + if len(evlist) > 0: + dbevent.record = evlist[0] + [prefor] = dbevent.getv("prefor") + + kmatch = db.lookup(table="origin", record="dbSCRATCH") + kmatch.putv(("orid", prefor)) + ormatcher = kmatch.matches(dborigin, "orid") + orlist = 
ormatcher() + if len(orlist) > 1: + elog.notify("strange, found a few origins for orid %d" % prefor) + if len(orlist) > 0: + dborigin.record = orlist[0] + dborigin.putv( + ("time", etime), + ("lat", lat), + ("lon", lon), + ("depth", depth), + ("jdate", jdate), + ) + if magtype.lower() == "ml": + dborigin.putv(("ml", mag)) + elif magtype.lower() == "mb": + dborigin.putv(("mb", mag)) + elif magtype.lower() == "ms": + dborigin.putv(("ms", mag)) + kmatch = db.lookup(table="netmag", record="dbSCRATCH") + kmatch.putv(("orid", prefor)) + magmatcher = kmatch.matches(dbnetmag, "orid") + maglist = magmatcher() + if len(maglist) > 1: + elog.notify( + "strange, found a few netmags for origin %d" % prefor + ) + if len(maglist) > 0: + dbnetmag.record = maglist[0] + dbnetmag.putv( + ("magnitude", mag), ("magtype", magtype), ("auth", auth), + ) + + return 0 + + +if __name__ == "__main__": + status = main() + sys.exit(status) From 8dae28f5c5297f513fc70964e96728bfba122cd6 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 13:51:47 +0000 Subject: [PATCH 13/60] use requests, add manpage --- bin/import/usgs2db/fdsntxt2db.1 | 68 +++++ bin/import/usgs2db/fdsntxt2db.xpy | 463 ++++++++++++++++++------------ 2 files changed, 352 insertions(+), 179 deletions(-) create mode 100644 bin/import/usgs2db/fdsntxt2db.1 diff --git a/bin/import/usgs2db/fdsntxt2db.1 b/bin/import/usgs2db/fdsntxt2db.1 new file mode 100644 index 000000000..d219a2a0e --- /dev/null +++ b/bin/import/usgs2db/fdsntxt2db.1 @@ -0,0 +1,68 @@ +.TH FDSNTXT2DB 1 +.SH NAME +fdsntxt2db \- utility to retrieve earthquake information in text format from \fBFDSN\fP services. +.SH SYNOPSIS +.nf +\fBfdsntxt2db\fP [-v] [-a \fIauthor\fP] + [-k \fIkeydbname\fP] [-u \fIservice-url\fP] \fIdbname\fP +.fi +.SH DESCRIPTION +\fBfdsntxt2db\fP queries fdsn services for event information in text format +and stores the event information in a database. 
+The program keeps track of event ids in a separate table and updates
+the output database if new information is available.
+.br
+.SH OPTIONS
+.IP \-v
+verbose flag
+.IP -h
+Show help.
+.IP "-k keydbname"
+name of database to keep track of event ids. Since USGS/EMSC IDs
+are a mixture of strings and integers, a separate table is needed.
+The database must be in schema \fBidmatch1.0\fP or higher. The database
+is created upon startup if not originally existing.
+The database name defaults to keydb.
+.IP \-u
+URL of the data source. A list of all available feeds can be found on a
+website of the \fBFDSN\fP: \fBhttps://www.fdsn.org/datacenters/\fP.
+Please be aware that not all datacenters provide event information. And
+not all datacenters providing event information also support the text format.
+The URL defaults to \fBhttp://webservices.ingv.it/fdsnws/event/1/query?format=text\fP,
+the service run by INGV for Italy.
+.IP "-a author"
+Author name for event, origin and netmag table. If this is not specified, the author information returned by the service is used. There is no default.
+.IP database
+The name of the output database. This argument is required.
+.SH EXAMPLE
+To retrieve recent event data from INGV into a database named \fBingv\fP:
+.nf
+fdsntxt2db -k db/fdsn_ids -v ingv
+.fi
+To update event information every 10 minutes, add the following to your rtexec.pf
+.nf
+# update database ingv every 10 minutes, all events above magnitude 2.5
+INGV UTC 0,10,20,30,40,50 * * * * fdsntxt2db -k db/fdsn_ids db/ingv
+.fi
+.SH ENVIRONMENT
+The program uses the Python module \fIrequests\fP to retrieve data from a
+webservice. This module supports proxy configuration based on the standard
+environment variables \fIhttp_proxy\fP, \fIhttps_proxy\fP.
+If you need more control over proxy settings, feel free to
+contact the author, Nikolaus.Horn@zamg.ac.at. 
+.SH "SEE ALSO" +.nf + +https://docs.python-requests.org/en/master, USGS2orb(1), antelope_python(3y) + +.fi +.SH "BUGS AND CAVEATS" +Since the text format for event information does not provide information on +when the data has been updated, information on existing events is always +overwritten. This is inefficient but unavoidable. The error handling is simple. +I implemented my personal understanding of the data offered. There might +be wiser ways to use the save the information in a database. +This module uses the python modules requests with all their bugs and +limitations. And of course I added many more problems... +.SH AUTHOR +Nikolaus Horn (nikolaus.horn@zamg.ac.at) diff --git a/bin/import/usgs2db/fdsntxt2db.xpy b/bin/import/usgs2db/fdsntxt2db.xpy index 1116f758d..7270cb7b6 100755 --- a/bin/import/usgs2db/fdsntxt2db.xpy +++ b/bin/import/usgs2db/fdsntxt2db.xpy @@ -1,43 +1,53 @@ -sys.path.append(os.environ["ANTELOPE"] + "/contrib/data/python") - """ @author Nikolaus Horn -@created 2018-01-01 -@modified 2020-10-27 -@version 1.0 +@created 2013-11-25 +@modified 2022-03-31 +@version 1.2 @license MIT-style license +@credits ZAMG for my visit to EGU 2014 """ -# Import Antelope modules +import getopt +import requests + +# import json +import warnings +# Import Antelope modules import antelope.datascope as ds import antelope.stock as stock -import getopt -import codecs -import urllib3 -import json -import pprint +import antelope.elog as elog +import zamg.utilities as zu -def usage(): - print(sys.argv[0], "[-v] [-a auth] [-k keydb] [-u url] dbname") + +def usage(progname): + print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") def main(): - BASE_URL = "http://webservices.rm.ingv.it/fdsnws/event/1/query?format=text" + progname = sys.argv[0].split("/")[-1] + elog.init(progname) + BASE_URL = "http://webservices.ingv.it/fdsnws/event/1/query?format=text" + + auth = "" + help_text = """a list of datacenters running FDSN services can be found on the 
web: + https://www.fdsn.org/datacenters + Unfortunately, there is no general overview if these services provide event information and also support the text format. + You mut check each webservice individually""" verbose = 0 archive = 0 opts = [] args = [] keydbname = "keydb" keyschema = "idmatch1.0" - auth = "INGV" + proxy_url = "" try: - opts, args = getopt.getopt(sys.argv[1:], "a:k:u:v", "") + opts, args = getopt.getopt(sys.argv[1:], "a:hk:p:u:v", "") except getopt.GetoptError: - print("illegal option") - usage() + elog.die("illegal option") + usage(progname) sys.exit(2) for o, a in opts: @@ -49,13 +59,19 @@ def main(): BASE_URL = a elif o == "-k": keydbname = a + elif o == "-p": + proxy_url = a + elif o == "-h": + usage(progname) + elog.notify(help_text) + sys.exit(0) if len(args) > 1 or len(args) < 1: - usage() + usage(progname) sys.exit(1) - dbname = args[0] - # print dbname + if len(args) > 0: + dbname = args[0] db = ds.dbopen(dbname, "r+") dborigin = db.lookup(table="origin") @@ -66,13 +82,16 @@ def main(): [mlnull] = dbq.getv("ml") dbq = db.lookup(table="event", field="evname", record="dbNULL") evname_width = dbq.query("dbFIELD_SIZE") + dbq = db.lookup(table="event", field="auth", record="dbNULL") + auth_width = dbq.query("dbFIELD_SIZE") + kdb = ds.dbopen(keydbname, "r+") descname = kdb.query("dbDATABASE_FILENAME") if os.path.exists(descname): schemaname = kdb.query("dbSCHEMA_NAME") if schemaname != keyschema: - print( + elog.die( "keydb %s has wrong schema %s, should be %s" % (keydbname, schemaname, keyschema) ) @@ -80,178 +99,264 @@ def main(): else: kdb.close() ds.dbcreate(keydbname, keyschema) + kdb = ds.dbopen(keydbname, "r+") try: idmatch = kdb.lookup(table="idmatch") except Exception as e: - print("Error :", e) - - updated = stock.now() - # proxies={'http':'http://138.22.156.44:3128'} - http = urllib3.PoolManager() - req = http.request("GET", BASE_URL) - txt_string = req.data.decode() - # 
#EventID|Time|Latitude|Longitude|Depth/Km|Author|Catalog|Contributor|ContributorID|MagType|Magnitude|MagAuthor|EventLocationName - # 7093051|2016-08-24T07:55:22.780000|42.8127|13.1653|9.6|SURVEY-INGV||||ML|2.8|--|Perugia - - for line in txt_string.splitlines(): - if line.startswith("#"): - continue - time = status = cdi = place = code = felt = mag = magtype = net = evtype = "" - ( - evid, - timestr, - lats, - lons, - depths, - oauth, - cat, - cont, - contid, - magtype, - mags, - magauth, - evname, - rest, - ) = line.split("|", 14) - - evid = int(evid) - mag = float(mags) - lat = float(lats) - lon = float(lons) - depth = float(depths) - etime = stock.str2epoch(timestr.replace("T", " ")) - if auth != "INGV": - auth = str(oauth) - - ml = mb = ms = mlnull - # be sure to convert unicode objects to string objects by calling "str(xxx)", - # this prevents datascope from CRASHING - - if magtype.lower() == "ml": - ml = mag - elif magtype.lower() == "mb": - mb = mag - elif magtype.lower() == "ms": - ms = mag - # grn, srn seems to be unimplemenmted - gr = stock.grnumber(lat, lon) - sr = stock.srnumber(lat, lon) - jdate = stock.epoch2str(etime, "%Y%j") - - fkey = str("%d" % evid) - - kmatch = idmatch.lookup(table="idmatch", record="dbSCRATCH") - try: - kmatch.putv(("fkey", fkey)) - except Exception as e: - print("Error :", e) - - matcher = kmatch.matches(idmatch, "fkey") - rec_list = matcher() - new_event = False - evid = 0 - updated_event = False - if len(rec_list) > 1: - print("found too many keys, sth strange goes on here") - if len(rec_list) > 0: - for rec in rec_list: - idmatch.record = rec - [ftime, kname, kval] = idmatch.getv("ftime", "keyname", "keyvalue") - # print "found key %s %s" % (kname, kval) - if kname == "evid": - evid = kval - if updated > ftime: - new_event = False - updated_event = True - else: - updated_event = False + elog.die("fatal problem with key database:", e) + + if proxy_url != "": + if proxy_url.startswith("https"): + proxy = {"https": proxy_url} 
else: - new_event = True + proxy = {"http": proxy_url} + with warnings.catch_warnings(): + warnings.simplefilter( + "ignore" + ) # ignore silly warnings on SSL verification, especially needed on 5.9 + try: + req = requests.get(BASE_URL, proxies=proxy, verify=False, timeout=30) + req.raise_for_status() + except requests.exceptions.HTTPError as herr: + elog.die("problem requesting data from %s:%s" % (BASE_URL, herr)) + except requests.exceptions.Timeout: + elog.die("timeout requesting data from %s" % BASE_URL) + except requests.exceptions.TooManyRedirects: + elog.die("too many retries requesting data from %s" % BASE_URL) + except requests.exceptions.RequestException as e: + elog.die("fatal problem requesting data from %s" % BASE_URL) + except: + elog.die("unspecific problem requesting data from %s" % BASE_URL) + else: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + req = requests.get(BASE_URL, verify=False, timeout=30) + req.raise_for_status() + except requests.exceptions.HTTPError as herr: + elog.die("problem requesting data from %s:%s" % (BASE_URL, herr)) + except requests.exceptions.Timeout: + elog.die("timeout requesting data from %s" % BASE_URL) + except requests.exceptions.TooManyRedirects: + elog.die("too many retries requesting data from %s" % BASE_URL) + except requests.exceptions.RequestException as e: + elog.die("fatal problem requesting data from %s" % BASE_URL) + except: + elog.die("unspecific problem requesting data from %s" % BASE_URL) + req.encoding = "utf8" # maybe not necessary... 
+ data = req.text + for line in data.splitlines(): + if not line.startswith("#"): + updated = stock.now() + time = ( + status + ) = cdi = place = code = felt = mag = magtype = net = evtype = "" + ( + unid, + timestr, + lats, + lons, + depths, + oauth, + cat, + cont, + contid, + magtype, + mags, + magauth, + evname, + rest, + ) = line.split("|", 14) + mag = float(mags) + lat = float(lats) + lon = float(lons) + depth = float(depths) + etime = stock.str2epoch(timestr.replace("T", " ")) + if auth != "": + oauth = auth + magauth = auth + + + ml = mb = ms = mlnull + + # push M to mb, seems to make sense... + lmt = magtype.lower() + if lmt == "m": + magtype = "mb" + elif lmt == "ml": + ml = mag + elif lmt == "mb": + mb = mag + elif lmt == "ms": + ms = mag + + gr = stock.grnumber(lat, lon) + sr = stock.srnumber(lat, lon) + jdate = stock.epoch2str(etime, "%Y%j") - if new_event: - if verbose: - print("new event %s" % code) - evid = dborigin.nextid("evid") - orid = dborigin.nextid("orid") - orecno = dborigin.addv( - ("time", etime), - ("lat", lat), - ("lon", lon), - ("depth", depth), - ("evid", evid), - ("orid", orid), - ("jdate", jdate), - ("mb", mb), - ("ml", ml), - ("ms", ms), - ("nass", 0), - ("ndef", 0), - ("auth", auth), - ("grn", gr), - ("srn", sr), - ) - erecno = dbevent.addv( - ("evid", evid), - ("prefor", orid), - ("evname", evname[:evname_width]), - ("auth", auth), - ) - nmrecno = dbnetmag.addv( - ("evid", evid), - ("orid", orid), - ("magnitude", mag), - ("magtype", magtype), - ("auth", auth), - ) - # idmatch.addv(('fkey',fkey),('keyname','evid'),('keyvalue',evid),('ftime',updated) ) - idmatch.addv(("fkey", fkey), ("keyname", "evid"), ("keyvalue", evid)) - elif updated_event: if verbose: - print("updated event %s" % code) - idmatch.putv(("ftime", updated)) - kmatch = db.lookup(table="event", record="dbSCRATCH") - kmatch.putv(("evid", evid)) - evmatcher = kmatch.matches(dbevent, "evid") - evlist = evmatcher() - if len(evlist) > 1: - print("strange, found a few 
matching events for evid %d " % evid) - if len(evlist) > 0: - dbevent.record = evlist[0] - [prefor] = dbevent.getv("prefor") - - kmatch = db.lookup(table="origin", record="dbSCRATCH") - kmatch.putv(("orid", prefor)) - ormatcher = kmatch.matches(dborigin, "orid") - orlist = ormatcher() - if len(orlist) > 1: - print("strange, found a few origind for orid %d" % prefor) - if len(orlist) > 0: - dborigin.record = orlist[0] - dborigin.putv( + elog.notify("check id %s" % unid) + + kmatch = idmatch.lookup(table="idmatch", record="dbSCRATCH") + try: + kmatch.putv(("fkey", unid)) + except Exception as e: + elog.die("problem writing key %s to matcher :", (unid, e)) + + matcher = kmatch.matches(idmatch, "fkey") + rec_list = matcher() + new_event = False + evid = 0 + updated_event = False + if len(rec_list) > 1: + elog.notify( + "found too many keys for %s, sth strange goes on here" % unid + ) + if len(rec_list) > 0: + for rec in rec_list: + idmatch.record = rec + [ftime, kname, kval] = idmatch.getv("ftime", "keyname", "keyvalue") + # print "found key %s %s" % (kname, kval) + if kname == "evid": + evid = kval + if updated > ftime: + new_event = False + updated_event = True + else: + updated_event = False + + else: + new_event = True + + if new_event: + problem = False + if verbose: + elog.notify("new event %s" % unid) + evid = dborigin.nextid("evid") + orid = dborigin.nextid("orid") + try: + orecno = dborigin.addv( ("time", etime), ("lat", lat), ("lon", lon), ("depth", depth), + ("evid", evid), + ("orid", orid), ("jdate", jdate), + ("mb", mb), + ("ml", ml), + ("ms", ms), + ("nass", 0), + ("ndef", 0), + ("auth", zu.string_maxbytes(oauth, auth_width)), + ("grn", gr), + ("srn", sr), ) - if magtype.lower() == "ml": - dborigin.putv(("ml", mag)) - elif magtype.lower() == "mb": - dborigin.putv(("mb", mag)) - elif magtype.lower() == "ms": - dborigin.putv(("ms", mag)) - kmatch = db.lookup(table="netmag", record="dbSCRATCH") + except Exception as __: + problem = True + if verbose: + 
elog.notify( + "problem adding origin for event at %s" + % stock.strtime(etime) + ) + + if not problem: + try: + erecno = dbevent.addv( + ("evid", evid), + ("prefor", orid), + ("evname", zu.string_maxbytes(evname, evname_width)), + ("auth", zu.string_maxbytes(oauth, auth_width)), + ) + except Exception as __: + if verbose: + problem = True + elog.notify( + "problem adding event for events at %s" + % stock.strtime(etime) + ) + if not problem: + try: + nmrecno = dbnetmag.addv( + ("evid", evid), + ("orid", orid), + ("magnitude", mag), + ("magtype", magtype), + ("auth", zu.string_maxbytes(magauth, auth_width)), + ) + except Exception as __: + if verbose: + problem = True + elog.notify( + "problem adding netmap for event at %s" + % stock.strtime(etime) + ) + if not problem: + try: + idmatch.addv( + ("fkey", unid), + ("keyname", "evid"), + ("keyvalue", evid), + ("ftime", updated), + ) + except Exception as __: + if verbose: + problem = True + elog.notify( + "problem adding id for event at %s" + % stock.strtime(etime) + ) + elif updated_event: + if verbose: + elog.notify("eventually updated event %s" % unid) + idmatch.putv(("ftime", updated)) + kmatch = db.lookup(table="event", record="dbSCRATCH") + kmatch.putv(("evid", evid)) + evmatcher = kmatch.matches(dbevent, "evid") + evlist = evmatcher() + if len(evlist) > 1: + elog.notify( + "strange, found a few matching events for evid %d " % evid + ) + if len(evlist) > 0: + dbevent.record = evlist[0] + [prefor] = dbevent.getv("prefor") + + kmatch = db.lookup(table="origin", record="dbSCRATCH") kmatch.putv(("orid", prefor)) - magmatcher = kmatch.matches(dbnetmag, "orid") - maglist = magmatcher() - if len(maglist) > 1: - print("strange, found a few netmags for origin %d" % prefor) - if len(maglist) > 0: - dbnetmag.record = maglist[0] - dbnetmag.putv( - ("magnitude", mag), ("magtype", magtype), ("auth", auth) + ormatcher = kmatch.matches(dborigin, "orid") + orlist = ormatcher() + if len(orlist) > 1: + elog.notify("strange, found a 
few origins for orid %d" % prefor) + if len(orlist) > 0: + dborigin.record = orlist[0] + dborigin.putv( + ("time", etime), + ("lat", lat), + ("lon", lon), + ("depth", depth), + ("jdate", jdate), ) + if magtype.lower() == "ml": + dborigin.putv(("ml", mag)) + elif magtype.lower() == "mb": + dborigin.putv(("mb", mag)) + elif magtype.lower() == "ms": + dborigin.putv(("ms", mag)) + kmatch = db.lookup(table="netmag", record="dbSCRATCH") + kmatch.putv(("orid", prefor)) + magmatcher = kmatch.matches(dbnetmag, "orid") + maglist = magmatcher() + if len(maglist) > 1: + elog.notify( + "strange, found a few netmags for origin %d" % prefor + ) + if len(maglist) > 0: + dbnetmag.record = maglist[0] + dbnetmag.putv( + ("magnitude", mag), ("magtype", magtype), ("auth", zu.string_maxbytes(magauth, auth_width)), + ) return 0 From 85152dafe7fc170f3571dc00d89dc07fea95052d Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 13:52:54 +0000 Subject: [PATCH 14/60] cosmetic changes --- bin/import/usgs2db/.gitignore | 2 ++ bin/import/usgs2db/Makefile | 4 ++-- bin/import/usgs2db/emsc2db.xpy | 13 ++++++++++--- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/bin/import/usgs2db/.gitignore b/bin/import/usgs2db/.gitignore index 0627913f9..6e0ff7959 100644 --- a/bin/import/usgs2db/.gitignore +++ b/bin/import/usgs2db/.gitignore @@ -4,3 +4,5 @@ pdetxt2db keydb fdsntxt2db keydb.idmatch +gfzrss2db +orig_* diff --git a/bin/import/usgs2db/Makefile b/bin/import/usgs2db/Makefile index e3a6c4c1d..17167a1c1 100644 --- a/bin/import/usgs2db/Makefile +++ b/bin/import/usgs2db/Makefile @@ -1,5 +1,5 @@ -BIN=usgs2db emsc2db pdetxt2db fdsntxt2db -MAN1=usgs2db.1 emsc2db.1 +BIN=usgs2db emsc2db gfzrss2db fdsntxt2db +MAN1=usgs2db.1 emsc2db.1 fdsntxt2db.1 gfzrss2db.1 DATA=idmatch1.0 DATADIR=schemas diff --git a/bin/import/usgs2db/emsc2db.xpy b/bin/import/usgs2db/emsc2db.xpy index 1048a984d..b89d1858e 100644 --- a/bin/import/usgs2db/emsc2db.xpy +++ b/bin/import/usgs2db/emsc2db.xpy @@ -33,13 
+33,18 @@ def main(): "http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_hour.geojson" ) auth = "USGS" - help_text = "\nUSGS provides at most 1 month of data on the following URL:\nhttp://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson\ndefault is to retrieve only the most recent events" + help_text = """USGS provides at most 1 month of data on the following URL: +http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson. +The default is to retrieve events from the last hour with a mgnitude of 2.5 or higher""" else: BASE_URL = ( "http://www.seismicportal.eu/fdsnws/event/1/query?limit=10&format=json" ) auth = "EMSC" - help_text = "\nEMSC provides at most 1000 events at once on the following URL:\nhttp://www.seismicportal.eu/fdsnws/event/1/query?limit=1000&format=json\ndefault is to retrieve only the most recent events" + help_text = """EMSC provides at most 1000 events at once on the following URL: +http://www.seismicportal.eu/fdsnws/event/1/query?limit=1000&format=json. 
+The default default is to retrieve only the most recent events""" + verbose = 0 archive = 0 opts = [] @@ -86,6 +91,8 @@ def main(): [mlnull] = dbq.getv("ml") dbq = db.lookup(table="event", field="evname", record="dbNULL") evname_width = dbq.query("dbFIELD_SIZE") + dbq = db.lookup(table="origin", field="auth", record="dbNULL") + auth_width = dbq.query("dbFIELD_SIZE") kdb = ds.dbopen(keydbname, "r+") descname = kdb.query("dbDATABASE_FILENAME") @@ -158,7 +165,7 @@ def main(): lon = float(coordinates[0]) lat = float(coordinates[1]) depth = float(coordinates[2]) - #EMSC correctly specifies depth as a negative number :-) + # EMSC correctly specifies depth as a negative number :-) if progname == "emsc2db": depth *= -1.0 properties = fdata["properties"] From b0b5996822df494864d18dfccdba45ca2d44e6b0 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 1 Apr 2022 14:33:06 +0000 Subject: [PATCH 15/60] MacroSeismic Sensor Project (aka Ruwai) --- data/instruments/dataloggers/misc/Makefile | 1 + data/instruments/dataloggers/misc/mss2.pf | 27 ++++++++++++++++++++++ 2 files changed, 28 insertions(+) create mode 100644 data/instruments/dataloggers/misc/mss2.pf diff --git a/data/instruments/dataloggers/misc/Makefile b/data/instruments/dataloggers/misc/Makefile index 81604ec9e..863c953f4 100644 --- a/data/instruments/dataloggers/misc/Makefile +++ b/data/instruments/dataloggers/misc/Makefile @@ -2,6 +2,7 @@ DATADIR=instruments/dataloggers DATA= parosci.pf \ saic_2001.pf \ SMACH_SM2.pf \ + mss2.pf \ unknown.pf SUBDIR=/contrib diff --git a/data/instruments/dataloggers/misc/mss2.pf b/data/instruments/dataloggers/misc/mss2.pf new file mode 100644 index 000000000..5d65d2cc6 --- /dev/null +++ b/data/instruments/dataloggers/misc/mss2.pf @@ -0,0 +1,27 @@ +# https://www.mertl-research.at/en/projects/macro_seismic_network/ +originator Nikolaus Horn, ZAMG +last_modified 2022-04-01 + +category MSS +configuration Jamstec OBS + +description MacroSeismik Sensor Network +dfile mss2 # no response, 
a mystery + +streams 100sps + +# definitions of streams +100sps 100 a/d + +# gain = counts/V = (1 / 0.000125 V/count) +gains 8000 + + +# Stages +a/d &Arr{ +gtype digitizer +samprate 100 +iunits V +ounits counts +sn datalogger +} From 2f541b4bd9217009becf80a5d5b779d5d914a3b2 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Wed, 6 Apr 2022 11:56:43 +0000 Subject: [PATCH 16/60] less noisy --- data/python/zamg_utilities/.gitignore | 1 + data/python/zamg_utilities/Makefile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/data/python/zamg_utilities/.gitignore b/data/python/zamg_utilities/.gitignore index 3b8c02d97..5a3a37455 100644 --- a/data/python/zamg_utilities/.gitignore +++ b/data/python/zamg_utilities/.gitignore @@ -16,3 +16,4 @@ send_multipartemail.3y add_remark.3y get_remark.3y set_remark.3y +next_number.3y diff --git a/data/python/zamg_utilities/Makefile b/data/python/zamg_utilities/Makefile index 997a4f63a..deae79b6d 100644 --- a/data/python/zamg_utilities/Makefile +++ b/data/python/zamg_utilities/Makefile @@ -13,7 +13,7 @@ CLEAN= mkso *.pyc *.so __init__.py \ utf8len.3y string_charsplit.3y string_maxbytes.3y \ create_dbdesc.3y rfc33392epoch.3y epoch2rfc3339.3y \ spherical_distance.3y haversine_distance.3y \ - multipartemail.3y send_email.3y add_remark.3y get_remark.3y set_remark.3y + multipartemail.3y send_email.3y add_remark.3y get_remark.3y set_remark.3y next_number.3y include $(ANTELOPEMAKE) SUBDIR=/contrib From 05e4278855cba4c867a6fbab776e415101c5f98b Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Wed, 6 Apr 2022 11:57:14 +0000 Subject: [PATCH 17/60] add lowpass to MSS --- data/instruments/dataloggers/misc/mss2.pf | 19 ++++++++++++------- data/responses/misc/Makefile | 4 +++- data/responses/misc/lp12_8hz | 14 ++++++++++++++ 3 files changed, 29 insertions(+), 8 deletions(-) create mode 100644 data/responses/misc/lp12_8hz diff --git a/data/instruments/dataloggers/misc/mss2.pf b/data/instruments/dataloggers/misc/mss2.pf index 
5d65d2cc6..5e3a0b3ea 100644 --- a/data/instruments/dataloggers/misc/mss2.pf +++ b/data/instruments/dataloggers/misc/mss2.pf @@ -3,7 +3,7 @@ originator Nikolaus Horn, ZAMG last_modified 2022-04-01 category MSS -configuration Jamstec OBS +configuration MacroSeismic Sensor # Oxymoron or contradictio in adjecto description MacroSeismik Sensor Network dfile mss2 # no response, a mystery @@ -11,7 +11,7 @@ dfile mss2 # no response, a mystery streams 100sps # definitions of streams -100sps 100 a/d +100sps 100 a/d lp_12.8l # gain = counts/V = (1 / 0.000125 V/count) gains 8000 @@ -19,9 +19,14 @@ gains 8000 # Stages a/d &Arr{ -gtype digitizer -samprate 100 -iunits V -ounits counts -sn datalogger + gtype digitizer + samprate 100 + iunits V + ounits counts + sn datalogger +} +lp_12.8 &Arr{ + gtype analog_lp + gain 1 + response &datafile(responses/lp12_8hz) } diff --git a/data/responses/misc/Makefile b/data/responses/misc/Makefile index d1282bd52..8bd129ea4 100644 --- a/data/responses/misc/Makefile +++ b/data/responses/misc/Makefile @@ -1,7 +1,9 @@ DATADIR=responses DATA=SSA2_50 dummy GEOS_100 SCSN_20 SSR1_50 uwiseismic unkn_vel fir_dummy \ gsv_111_10mm sm6-b BH1 \ - seismonitor sm2_aa_128sps sm2_aa_256sps ifs3000 + seismonitor lp12_8hz \ + sm2_aa_128sps sm2_aa_256sps \ + ifs3000 SUBDIR=/contrib diff --git a/data/responses/misc/lp12_8hz b/data/responses/misc/lp12_8hz new file mode 100644 index 000000000..dda46e9ec --- /dev/null +++ b/data/responses/misc/lp12_8hz @@ -0,0 +1,14 @@ +# 12.8Hz lowpass 20dB/Decade +# All poles and zeroes in radians/sec +# 1 type +# 1 num of zeroes +# 0 num of poles +# 0.0 input sample interval +# 1 decim factor +# 2.010660e+1 normalization factor +# 1.0 gain +theoretical 1 lowpass paz pz6seismo +8.04244772E+01 +1 Poles +-8.0424772E+01 0.0E+00 0.0000E+00 0.0000E+00 +0 Zeros From d276e9938f06fe7e1cd7c5517f916c664efbc37c Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Wed, 6 Apr 2022 13:05:24 +0000 Subject: [PATCH 18/60] SARA datalogger and Hyperion 
3000 Infrasound Barometer --- data/instruments/dataloggers/misc/Makefile | 1 + data/instruments/dataloggers/misc/sara_is.pf | 32 ++++++++++++++++++++ data/instruments/sensors/ifs_3000.pf | 2 +- data/responses/infrasound/Makefile | 3 +- data/responses/infrasound/ifs_3000 | 22 ++++++++++++++ data/responses/misc/lp8_8hz | 14 +++++++++ 6 files changed, 72 insertions(+), 2 deletions(-) create mode 100644 data/instruments/dataloggers/misc/sara_is.pf create mode 100644 data/responses/infrasound/ifs_3000 create mode 100644 data/responses/misc/lp8_8hz diff --git a/data/instruments/dataloggers/misc/Makefile b/data/instruments/dataloggers/misc/Makefile index 863c953f4..0f3421445 100644 --- a/data/instruments/dataloggers/misc/Makefile +++ b/data/instruments/dataloggers/misc/Makefile @@ -3,6 +3,7 @@ DATA= parosci.pf \ saic_2001.pf \ SMACH_SM2.pf \ mss2.pf \ + sara_is.pf \ unknown.pf SUBDIR=/contrib diff --git a/data/instruments/dataloggers/misc/sara_is.pf b/data/instruments/dataloggers/misc/sara_is.pf new file mode 100644 index 000000000..428ddc99e --- /dev/null +++ b/data/instruments/dataloggers/misc/sara_is.pf @@ -0,0 +1,32 @@ +# https://www.mertl-research.at/en/projects/macro_seismic_network/ +originator Nikolaus Horn, ZAMG +last_modified 2022-04-05 + +category SARA +configuration Sara SL06 for Infrasound + +description +dfile sl06 # no response, a mystery + +streams 20sps + +# definitions of streams +20sps 20 a/d lp_8.8hz + +# gain = counts/V +gains 838860.8 # 20 V peak-to-peak, that results in a factor of 838860.8 Count/V, or 0.000001192093 V/Count. 
+ + +# Stages +a/d &Arr{ + gtype digitizer + samprate 20 + iunits V + ounits counts + sn datalogger +} +lp_8.8hz &Arr{ + gtype analog_lp + gain 1 + response &datafile(responses/lp8_8hz) +} diff --git a/data/instruments/sensors/ifs_3000.pf b/data/instruments/sensors/ifs_3000.pf index e83310740..a9d0edbb3 100644 --- a/data/instruments/sensors/ifs_3000.pf +++ b/data/instruments/sensors/ifs_3000.pf @@ -20,5 +20,5 @@ orientations &Tbl{ D 0 0 } -response &datafile(responses/ifs3000) +response &datafile(responses/ifs_3000) diff --git a/data/responses/infrasound/Makefile b/data/responses/infrasound/Makefile index 00e6391ad..aec31e80c 100644 --- a/data/responses/infrasound/Makefile +++ b/data/responses/infrasound/Makefile @@ -1,7 +1,8 @@ DATADIR=responses DATA= \ chaparral_25 \ - hyperion \ + hyperion \ + ifs_3000 \ mb2000_fil \ mb2000_raw \ ncpa \ diff --git a/data/responses/infrasound/ifs_3000 b/data/responses/infrasound/ifs_3000 new file mode 100644 index 000000000..243fd7a06 --- /dev/null +++ b/data/responses/infrasound/ifs_3000 @@ -0,0 +1,22 @@ +# Hyperion IS 3000 +# +# 3 num of zeroes +# 3 num of poles +# 1 normalization factor +# 1.0 gain +# # From the Manual: +# the frequencies of the poles for the transfer function of the sensors are: +#f1 = 1.483 mHz +#f2 = 3.387 mHz +#f3 = 29.49 mHz. 
+theoretical 1 anti-alias paz Hyperion +1 +3 Poles +-9.317963E-03 0.000000E+00 0.000000E+00 0.000000E+00 +-2.128114E-02 0.000000E+00 0.000000E+00 0.000000E+00 +-1.852911E-01 0.000000E+00 0.000000E+00 0.000000E+00 + +3 Zeros +0.000000E+00 0.000000E+00 0.000000E+00 0.000000E+00 +0.000000E+00 0.000000E+00 0.000000E+00 0.000000E+00 +0.000000E+00 0.000000E+00 0.000000E+00 0.000000E+00 diff --git a/data/responses/misc/lp8_8hz b/data/responses/misc/lp8_8hz new file mode 100644 index 000000000..8fe14c60f --- /dev/null +++ b/data/responses/misc/lp8_8hz @@ -0,0 +1,14 @@ +# 8.8Hz lowpass 20dB/Decade +# All poles and zeroes in radians/sec +# 1 type +# 1 num of zeroes +# 0 num of poles +# 0.0 input sample interval +# 1 decim factor +# 2.010660e+1 normalization factor +# 1.0 gain +theoretical 1 lowpass paz pz6seismo +5.529203E+01 +1 Poles +5.529203E+01 0.000000E+00 0.000000E+00 0.000000E+00 +0 Zeros From 3a07e0354aa742eeb6ceba90f04ec819ebab6fe1 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Wed, 6 Apr 2022 13:33:48 +0000 Subject: [PATCH 19/60] cleaner --- data/responses/misc/Makefile | 2 +- data/responses/misc/ifs3000 | 25 ------------------------- 2 files changed, 1 insertion(+), 26 deletions(-) delete mode 100644 data/responses/misc/ifs3000 diff --git a/data/responses/misc/Makefile b/data/responses/misc/Makefile index 8bd129ea4..c7e79a82c 100644 --- a/data/responses/misc/Makefile +++ b/data/responses/misc/Makefile @@ -1,7 +1,7 @@ DATADIR=responses DATA=SSA2_50 dummy GEOS_100 SCSN_20 SSR1_50 uwiseismic unkn_vel fir_dummy \ gsv_111_10mm sm6-b BH1 \ - seismonitor lp12_8hz \ + seismonitor lp12_8hz lp8_8hz \ sm2_aa_128sps sm2_aa_256sps \ ifs3000 diff --git a/data/responses/misc/ifs3000 b/data/responses/misc/ifs3000 deleted file mode 100644 index 20c0a1618..000000000 --- a/data/responses/misc/ifs3000 +++ /dev/null @@ -1,25 +0,0 @@ -# Hyperion IFS 3000 -# -# 3 num of zeroes -# 3 num of poles -# 1 normalization factor -# 1.0 gain -####### original (in Hz) ##### -# theoretical 1 
sensor paz Niko -# 1.0 -# 3 Poles -# 1.483E-3 0.0000e+00 0.0000E+00 0.0000E+00 -# 3.387E-3 0.0000e+00 0.0000E+00 0.0000E+00 -# 29.49E-3 0.0000e+00 0.0000E+00 0.0000E+00 -# 0 Zeros -####### converted (to rad) ##### nawk -f hz2rad filename > newfilename -theoretical 1 sensor paz Niko -1 -3 Poles --0.009318 0.000000 0 0 --0.021281 0.000000 0 0 --0.185291 0.000000 0 0 -3 Zeros -0 0 0 0 -0 0 0 0 -0 0 0 0 From 18fe1559f267b076a94d8b2da899519f503d2c7f Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Thu, 7 Apr 2022 15:29:47 +0000 Subject: [PATCH 20/60] typo fixed --- data/instruments/dataloggers/misc/mss2.pf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/instruments/dataloggers/misc/mss2.pf b/data/instruments/dataloggers/misc/mss2.pf index 5e3a0b3ea..a9f24ed36 100644 --- a/data/instruments/dataloggers/misc/mss2.pf +++ b/data/instruments/dataloggers/misc/mss2.pf @@ -11,7 +11,7 @@ dfile mss2 # no response, a mystery streams 100sps # definitions of streams -100sps 100 a/d lp_12.8l +100sps 100 a/d lp_12.8 # gain = counts/V = (1 / 0.000125 V/count) gains 8000 From ba4979540917e2ffac2a92653a39df75c40b5938 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Fri, 8 Apr 2022 13:10:36 +0200 Subject: [PATCH 21/60] license, announce help --- bin/import/usgs2db/LICENSE | 39 +++++++++++++++++++++++++++++++ bin/import/usgs2db/emsc2db.xpy | 20 ++++++++++++---- bin/import/usgs2db/fdsntxt2db.1 | 2 +- bin/import/usgs2db/fdsntxt2db.xpy | 13 +++++------ bin/import/usgs2db/gfzrss2db.1 | 2 +- bin/import/usgs2db/gfzrss2db.xpy | 34 ++++++++++++--------------- bin/import/usgs2db/usgs2db.1 | 6 +++-- 7 files changed, 81 insertions(+), 35 deletions(-) create mode 100644 bin/import/usgs2db/LICENSE diff --git a/bin/import/usgs2db/LICENSE b/bin/import/usgs2db/LICENSE new file mode 100644 index 000000000..f8985a66a --- /dev/null +++ b/bin/import/usgs2db/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2015,2022 Nikolaus Horn +All rights reserved. 
+ +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ diff --git a/bin/import/usgs2db/emsc2db.xpy b/bin/import/usgs2db/emsc2db.xpy index b89d1858e..4612cb3bc 100644 --- a/bin/import/usgs2db/emsc2db.xpy +++ b/bin/import/usgs2db/emsc2db.xpy @@ -22,7 +22,7 @@ import zamg.utilities as zu def usage(progname): - print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") + print(progname, "[-v] [-h] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") def main(): @@ -43,9 +43,10 @@ The default is to retrieve events from the last hour with a mgnitude of 2.5 or h auth = "EMSC" help_text = """EMSC provides at most 1000 events at once on the following URL: http://www.seismicportal.eu/fdsnws/event/1/query?limit=1000&format=json. -The default default is to retrieve only the most recent events""" +The default default is to retrieve only the most recent 10 events""" - verbose = 0 + verbose = False + debug = False archive = 0 opts = [] args = [] @@ -53,7 +54,7 @@ The default default is to retrieve only the most recent events""" keyschema = "idmatch1.0" proxy_url = "" try: - opts, args = getopt.getopt(sys.argv[1:], "a:hk:p:u:v", "") + opts, args = getopt.getopt(sys.argv[1:], "a:hk:p:u:vd", "") except getopt.GetoptError: elog.die("illegal option") usage(progname) @@ -61,7 +62,10 @@ The default default is to retrieve only the most recent events""" for o, a in opts: if o == "-v": - verbose = 1 + verbose = True + elif o == "-d": + verbose = True + debug = True elif o == "-a": auth = a elif o == "-u": @@ -119,6 +123,8 @@ The default default is to retrieve only the most recent events""" proxy = {"https": proxy_url} else: proxy = {"http": proxy_url} + if debug: + elog.notify("try to retrieve %s\nvia proxy %s" % (BASE_URL, proxy)) with warnings.catch_warnings(): warnings.simplefilter( "ignore" @@ -137,6 +143,8 @@ The default default is to retrieve only the most recent events""" except: elog.die("unspecific problem requesting data from %s" % BASE_URL) else: + if debug: + elog.notify("try to retrieve %s" % BASE_URL) with 
warnings.catch_warnings(): warnings.simplefilter("ignore") try: @@ -156,6 +164,8 @@ The default default is to retrieve only the most recent events""" obj = req.json() data = obj["features"] i = len(data) + if debug: + elog.debug("retrieved %d events" % i) for index in range(i): fdata = data[index] unid = fdata["id"] diff --git a/bin/import/usgs2db/fdsntxt2db.1 b/bin/import/usgs2db/fdsntxt2db.1 index d219a2a0e..f87a1ae85 100644 --- a/bin/import/usgs2db/fdsntxt2db.1 +++ b/bin/import/usgs2db/fdsntxt2db.1 @@ -3,7 +3,7 @@ fdsntxt2db \- utility to retrieve earthquake information in text format from \fBFDSN\fP services. .SH SYNOPSIS .nf -\fBfdsntxt2db\fP [-v] [-a \fIauthor\fP] +\fBfdsntxt2db\fP [-v] [-h] [-a \fIauthor\fP] [-k \fIkeydbname\fP] [-u \fIservice-url\fP] \fIdbname\fP .fi .SH DESCRIPTION diff --git a/bin/import/usgs2db/fdsntxt2db.xpy b/bin/import/usgs2db/fdsntxt2db.xpy index 7270cb7b6..0914f17bc 100755 --- a/bin/import/usgs2db/fdsntxt2db.xpy +++ b/bin/import/usgs2db/fdsntxt2db.xpy @@ -23,7 +23,7 @@ import zamg.utilities as zu def usage(progname): - print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") + print(progname, "[-v] [-h] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") def main(): @@ -36,8 +36,7 @@ def main(): https://www.fdsn.org/datacenters Unfortunately, there is no general overview if these services provide event information and also support the text format. 
You mut check each webservice individually""" - verbose = 0 - archive = 0 + verbose = False opts = [] args = [] keydbname = "keydb" @@ -52,7 +51,7 @@ def main(): for o, a in opts: if o == "-v": - verbose = 1 + verbose = True elif o == "-a": auth = a elif o == "-u": @@ -85,7 +84,6 @@ def main(): dbq = db.lookup(table="event", field="auth", record="dbNULL") auth_width = dbq.query("dbFIELD_SIZE") - kdb = ds.dbopen(keydbname, "r+") descname = kdb.query("dbDATABASE_FILENAME") if os.path.exists(descname): @@ -177,7 +175,6 @@ def main(): oauth = auth magauth = auth - ml = mb = ms = mlnull # push M to mb, seems to make sense... @@ -355,7 +352,9 @@ def main(): if len(maglist) > 0: dbnetmag.record = maglist[0] dbnetmag.putv( - ("magnitude", mag), ("magtype", magtype), ("auth", zu.string_maxbytes(magauth, auth_width)), + ("magnitude", mag), + ("magtype", magtype), + ("auth", zu.string_maxbytes(magauth, auth_width)), ) return 0 diff --git a/bin/import/usgs2db/gfzrss2db.1 b/bin/import/usgs2db/gfzrss2db.1 index 50cab0442..b08672ca5 100644 --- a/bin/import/usgs2db/gfzrss2db.1 +++ b/bin/import/usgs2db/gfzrss2db.1 @@ -3,7 +3,7 @@ gfzrss2db \- utility to retrieve earthquake information in text format from \fBFDSN\fP services. 
.SH SYNOPSIS .nf -\fBgfzrss2db\fP [-v] [-a \fIauthor\fP] +\fBgfzrss2db\fP [-v] [-h] [-a \fIauthor\fP] [-k \fIkeydbname\fP] [-u \fIservice-url\fP] \fIdbname\fP .fi .SH DESCRIPTION diff --git a/bin/import/usgs2db/gfzrss2db.xpy b/bin/import/usgs2db/gfzrss2db.xpy index 5e51a1166..b7b3e608b 100755 --- a/bin/import/usgs2db/gfzrss2db.xpy +++ b/bin/import/usgs2db/gfzrss2db.xpy @@ -12,7 +12,7 @@ import getopt import requests import warnings -#XML related stuff +# XML related stuff import xml.dom.minidom import pprint import datetime @@ -24,6 +24,7 @@ import antelope.elog as elog import zamg.utilities as zu + def getText(nodelist): rc = [] for node in nodelist: @@ -31,8 +32,9 @@ def getText(nodelist): rc.append(node.data) return "".join(rc) + def usage(progname): - print(progname, "[-v] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") + print(progname, "[-v] [-h] [-p proxy_url] [-a auth] [-k keydb] [-u url] dbname") def main(): @@ -41,9 +43,8 @@ def main(): BASE_URL = "http://geofon.gfz-potsdam.de/eqinfo/list.php?fmt=rss" auth_base = "GFZ" help_text = """Not all datacenters provide event information using FDSN webservices -Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is only usefol for obtaining event information from GFZ""" - verbose = 0 - archive = 0 +Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is only useful for obtaining event information from GFZ""" + verbose = False opts = [] args = [] keydbname = "keydb" @@ -58,7 +59,7 @@ Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is for o, a in opts: if o == "-v": - verbose = 1 + verbose = True elif o == "-a": auth_base = a elif o == "-u": @@ -91,7 +92,6 @@ Unfortunately, RSS or GeoRSS is not fully standardized. 
I assume this progam is dbq = db.lookup(table="event", field="auth", record="dbNULL") auth_width = dbq.query("dbFIELD_SIZE") - kdb = ds.dbopen(keydbname, "r+") descname = kdb.query("dbDATABASE_FILENAME") if os.path.exists(descname): @@ -198,9 +198,7 @@ Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is evid = 0 updated_event = False if len(rec_list) > 1: - elog.notify( - "found too many keys for %s, sth strange goes on here" % unid - ) + elog.notify("found too many keys for %s, sth strange goes on here" % unid) if len(rec_list) > 0: for rec in rec_list: idmatch.record = rec @@ -245,8 +243,7 @@ Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is problem = True if verbose: elog.notify( - "problem adding origin for event at %s" - % stock.strtime(etime) + "problem adding origin for event at %s" % stock.strtime(etime) ) if not problem: @@ -255,7 +252,7 @@ Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is ("evid", evid), ("prefor", orid), ("evname", zu.string_maxbytes(evname, evname_width)), - ("auth", auth) , + ("auth", auth), ) except Exception as __: if verbose: @@ -292,8 +289,7 @@ Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is if verbose: problem = True elog.notify( - "problem adding id for event at %s" - % stock.strtime(etime) + "problem adding id for event at %s" % stock.strtime(etime) ) elif updated_event: if verbose: @@ -304,9 +300,7 @@ Unfortunately, RSS or GeoRSS is not fully standardized. I assume this progam is evmatcher = kmatch.matches(dbevent, "evid") evlist = evmatcher() if len(evlist) > 1: - elog.notify( - "strange, found a few matching events for evid %d " % evid - ) + elog.notify("strange, found a few matching events for evid %d " % evid) if len(evlist) > 0: dbevent.record = evlist[0] [prefor] = dbevent.getv("prefor") @@ -343,7 +337,9 @@ Unfortunately, RSS or GeoRSS is not fully standardized. 
I assume this progam is if len(maglist) > 0: dbnetmag.record = maglist[0] dbnetmag.putv( - ("magnitude", mag), ("magtype", magtype), ("auth", auth), + ("magnitude", mag), + ("magtype", magtype), + ("auth", auth), ) return 0 diff --git a/bin/import/usgs2db/usgs2db.1 b/bin/import/usgs2db/usgs2db.1 index da7a57e73..ca1b771d0 100644 --- a/bin/import/usgs2db/usgs2db.1 +++ b/bin/import/usgs2db/usgs2db.1 @@ -3,11 +3,11 @@ usgs2db, emsc2db \- utilities to retrieve earthquake feeds in GeoJSON format from \fIUSGS\fP or \fIEMSC\fP. .SH SYNOPSIS .nf -\fBusgs2db\fP [-v] [-a \fIauthor\fP] +\fBusgs2db\fP [-v] [-h] [-a \fIauthor\fP] [-k \fIkeydbname\fP] [-u \fIfeed-url\fP] \fIdbname\fP .fi .nf -\fBemsc2db\fP [-v] [-a \fIauthor\fP] +\fBemsc2db\fP [-v] [-h] [-a \fIauthor\fP] [-k \fIkeydbname\fP] [-u \fIfeed-url\fP] \fIdbname\fP .fi .SH DESCRIPTION @@ -31,6 +31,8 @@ the default url in this case is \fBhttp://www.seismicportal.eu/fdsnws/event/1/query?limit=10&format=json\fP. .IP "-a author" Author name for event, origin and netmag table. Defaults to NEIC or EMSC +.IP "-h" +Show help. .IP database The name of the output database. This argument is required. .SH EXAMPLE From 14058ab9da7904f411756336304ba1c28f00cb8c Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Fri, 8 Apr 2022 13:11:48 +0200 Subject: [PATCH 22/60] add license --- bin/import/ctbto/LICENSE | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 bin/import/ctbto/LICENSE diff --git a/bin/import/ctbto/LICENSE b/bin/import/ctbto/LICENSE new file mode 100644 index 000000000..f024d35e3 --- /dev/null +++ b/bin/import/ctbto/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2015 Nikolaus Horn +All rights reserved. 
+ +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ From a4e2dbc80955582852304b90cc5bc502fd4f1e17 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Sun, 10 Apr 2022 13:48:37 +0200 Subject: [PATCH 23/60] no more external dependencies, license --- bin/export/leaflet_js/LICENSE | 39 +++++ bin/export/leaflet_js/Makefile | 6 +- bin/export/leaflet_js/web_evmap.1 | 89 ++++++++++ bin/export/leaflet_js/web_evmap.pf | 131 +++++++++++++++ bin/export/leaflet_js/web_evmap.xpy | 243 ++++++--------------------- bin/export/leaflet_js/web_stamap.1 | 83 ++++++--- bin/export/leaflet_js/web_stamap.pf | 165 +++++++++++++++--- bin/export/leaflet_js/web_stamap.xpy | 152 ++++------------- 8 files changed, 547 insertions(+), 361 deletions(-) create mode 100644 bin/export/leaflet_js/LICENSE create mode 100644 bin/export/leaflet_js/web_evmap.1 create mode 100644 bin/export/leaflet_js/web_evmap.pf diff --git a/bin/export/leaflet_js/LICENSE b/bin/export/leaflet_js/LICENSE new file mode 100644 index 000000000..f8985a66a --- /dev/null +++ b/bin/export/leaflet_js/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2015,2022 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + diff --git a/bin/export/leaflet_js/Makefile b/bin/export/leaflet_js/Makefile index de2073e59..8dd941fb5 100644 --- a/bin/export/leaflet_js/Makefile +++ b/bin/export/leaflet_js/Makefile @@ -1,7 +1,7 @@ -BIN=web_stamap -MAN1=web_stamap.1 +BIN=web_stamap web_evmap +MAN1=web_stamap.1 web_evmap.1 -PF=web_stamap.pf +PF=web_stamap.pf web_evmap.pf include $(ANTELOPEMAKE) DIRS= SUBDIR=/contrib diff --git a/bin/export/leaflet_js/web_evmap.1 b/bin/export/leaflet_js/web_evmap.1 new file mode 100644 index 000000000..9dae3dfb0 --- /dev/null +++ b/bin/export/leaflet_js/web_evmap.1 @@ -0,0 +1,89 @@ +.TH WEB_EVMAP 1 +.SH NAME +web_evmap \- interactive event map based on the leaflet library +.SH SYNOPSIS +.nf +web_evmap [-v] [-s sitedb] [-p pffile] [-o] dbname evid|orid +.fi + +.SH DESCRIPTION +This utility produces a webpage for interactive display of event information and stations in a database. +The display relies on the \fIleaflet\fP javascript library written by Vladimir Agafonkin. + +.SH OPTIONS +.IP "-v" +verbose flag +.IP "-p pffile" +parameter file +.IP "-s sitedb" +Database with station metadata. +.IP "-o" +The id is an origin id. Default would be to assume the event id is given. 
+.IP "dbname" +Name of database with event information. Site information can optionally be provided in a separatae database. This is a required parameter. +.IP "evid (or orid)" +Event or origin id. +.SH PARAMETER FILE + +The following is a sample parameter file. Most of the parameters +should be fairly self-explanatory. + +.in 2c +.ft CW +.nf +title Antelope Demo Event Map # title + +logo_url https://geoweb.zamg.ac.at/images/zamg_logo_vert.png +logo_alt ZAMG Logo + +leaflet_js https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.js +leaflet_css https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.css +leaflet_draw_js https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.js +leaflet_draw_css https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.css +leaflet_measurecontrol_js https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.js +leaflet_measurecontrol_css https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.css + +prefor_icon https://geoweb.zamg.ac.at/my_icons/star_red.png +origin_icon https://geoweb.zamg.ac.at/my_icons/star_blue.png +defining_icon https://geoweb.zamg.ac.at/my_icons/tri_26_magenta.png +nondefining_icon https://geoweb.zamg.ac.at/my_icons/tri_26_navy.png +unassoc_icon https://geoweb.zamg.ac.at/my_icons/tri_26_grey.png + + +html_template ... +.fi +.ft R +.in +.IP "logo_url, logo_alt" +Replace with your own logo. +.IP "prefor_icon, etc" +The default parameter file uses icons hosted by ZAMG in Vienna. You may use these icons, but it would be kind +to use locally hosted versions of these icons. +.IP "leaflet_..." +Javascript libraries used here. You may want to provide locally hosted versions of these libraries +to increase stability and reliability. The plugins \fBdraw\fP and \fBmeasurecontrol\fP are needed for the interactive ruler taht can be activated by clicking on the icon below the zoom control ("+/-") on the left side of the webpage. 
See https://github.com/makinacorpus/Leaflet.MeasureControl for more information. +.IP html_template +Template for the new webpage. See https://leaflet-extras.github.io/leaflet-providers/preview/ for a list of alternative map layers. +All curly brackets must be doubled because of the antelope parameter file quoting conventions. See \fIpf(5)\fP for an explanation. +.SH EXAMPLE +.nf +% web_evmap /opt/antelope/data/db/demo/demo 4 +% open evmap_evid_0000000004.html +.fi + +.SH "BUGS AND CAVEATS" +The default parameter file is using icons and java-script libraries and stylesheets provided by the cloudflare CDN, github and ZAMG. For productive usage, I would recommend to provide locally hosted copies. + +.SH "SEE ALSO" +.nf +web_stamap(1), +\fBhttps://leafletjs.com/\fP, +\fBhttps://leaflet-extras.github.io/leaflet-providers/preview/\fP, +\fBhttps://github.com/makinacorpus/Leaflet.MeasureControl\fP and +pf(5) +.fi +.SH AUTHOR +.nf +Nikolaus Horn, 2022 + +ZAMG / Vienna, Nikolaus.Horn@zamg.ac.at diff --git a/bin/export/leaflet_js/web_evmap.pf b/bin/export/leaflet_js/web_evmap.pf new file mode 100644 index 000000000..7a15838e4 --- /dev/null +++ b/bin/export/leaflet_js/web_evmap.pf @@ -0,0 +1,131 @@ +filebase events # output filename +title Antelope Demo Event Map # title + +logo_url https://geoweb.zamg.ac.at/images/zamg_logo_vert.png +logo_alt ZAMG Logo +leaflet_js https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.js +leaflet_css https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.css +leaflet_draw_js https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.js +leaflet_draw_css https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.css +leaflet_measurecontrol_js https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.js +leaflet_measurecontrol_css https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.css + +prefor_icon https://geoweb.zamg.ac.at/my_icons/star_red.png 
+origin_icon https://geoweb.zamg.ac.at/my_icons/star_blue.png +defining_icon https://geoweb.zamg.ac.at/my_icons/tri_26_magenta.png +nondefining_icon https://geoweb.zamg.ac.at/my_icons/tri_26_navy.png +unassoc_icon https://geoweb.zamg.ac.at/my_icons/tri_26_grey.png + + +# Note: we don't need to escape percent signs, but +# we need double curly brackets for a single on in the output, while +# key-fields must be embracketed only once +html_template &Literal{ + + + + {title} + + + + + + + + + + + +
+ + + + +} diff --git a/bin/export/leaflet_js/web_evmap.xpy b/bin/export/leaflet_js/web_evmap.xpy index d2c8d3e33..5395ba7c2 100755 --- a/bin/export/leaflet_js/web_evmap.xpy +++ b/bin/export/leaflet_js/web_evmap.xpy @@ -1,35 +1,22 @@ """ -recent webforms - -Create HTML for seismic event +Create HTML for seismic event(s) @author Nikolaus Horn %s%s%.1f%.2f%.0f%s" ) -html_template = """ - - - - - - -%s - - - - - - - - - -
- - - -""" - info_template = """ @@ -199,20 +57,7 @@ import codecs def usage(progname): - print(progname, "[-v] [-d dirout] [-s sitedb] [-h histdb] [-p pfname] [-o] dbname evid") - - -def feltsize(number): - # print "this is feltsize" - size = 12 - if number < 3: - size = 12 - else: - size = 12 + (number - 3) * (number - 3) - if size > 30: - size = 30 - # print "fs %d %d" % (number,size) - return size + print(progname, "[-v] [-d dirout] [-s sitedb] [-p pfname] [-o] dbname evid") def magsize(number): @@ -275,8 +120,6 @@ def main(): pf = a elif o == "-d": dirout = a - elif o == "-h": - histdbname = a elif o == "-s": sitedbname = a @@ -288,6 +131,23 @@ def main(): timenow = stock.now() creation_time = stock.epoch2str(timenow, "%d. %m. %Y %H:%M") + pf = stock.pfread(pfname) + + logo_url = pf["logo_url"] + logo_alt = pf["logo_alt"] + leaflet_css = pf["leaflet_css"] + leaflet_js = pf["leaflet_js"] + leaflet_draw_css = pf["leaflet_draw_css"] + leaflet_draw_js = pf["leaflet_draw_js"] + leaflet_measurecontrol_css = pf["leaflet_measurecontrol_css"] + leaflet_measurecontrol_js = pf["leaflet_measurecontrol_js"] + title = pf["title"] + html_template = pf["html_template"] + prefor_icon = pf["prefor_icon"] + origin_icon = pf["origin_icon"] + defining_icon = pf["defining_icon"] + nondefining_icon = pf["nondefining_icon"] + unassoc_icon = pf["unassoc_icon"] if id_is_orid: orid = int(args[1]) @@ -300,9 +160,6 @@ def main(): filebase = os.path.join(dirout, filebase) db = ds.dbopen(dbname, "r") my_tables = db.query(ds.dbSCHEMA_TABLES) - has_macro_tables = False - if "idp" in my_tables and "massoc" in my_tables and "meval" in my_tables: - has_macro_tables = True dborigin = db.lookup(table="origin") dbevent = db.lookup(table="event") dbassoc = db.lookup(table="assoc") @@ -349,10 +206,6 @@ def main(): stamagmatcher = dbstamagmatch.matches(dbstamag, ["sta", "orid"]) # print "we have %d origins" % nevents - if has_macro_tables: - dbp = dbp.join("massoc", outer=True) - dbp = 
dbp.join("meval", outer=True) - layers = [] layer_names = [] layer_descriptions = [] @@ -427,7 +280,7 @@ def main(): ms = mag elif magt == "ml": ml = mag - elif magt == "mw": + elif magt == "mw" or magt == "mww": mw = mag if mw > -99.0: @@ -565,7 +418,7 @@ def main(): ms = mag elif magt == "ml": ml = mag - elif magt == "mw": + elif magt == "mw" or magt == "mww": mw = mag if mw > -99.0: @@ -588,7 +441,7 @@ def main(): ss = "%d,%d" % (mysize, mysize) htmlfilename = "%s_evinfo.html" % filebase marker_html = ( - '%s
%s %.2f %.2f %.0fkm
%s
orid: %d evid: %d
Auth: %s
Mehr Info' + '%s
%s %.2f %.2f %.0fkm
%s
orid: %d evid: %d
Auth: %s
more information' % ( tooltipstr, etype, @@ -668,7 +521,9 @@ def main(): magdiff = ml - stamag if stamagtype.lower() == "ms" and ms > -90.0: magdiff = ms - stamag - if stamagtype.lower() == "mw" and mw > -90.0: + if ( + stamagtype.lower() == "mw" or stamagtype.lower() == "mww" + ) and mw > -90.0: magdiff = mw - stamag if first_stamag: mag_str.append( @@ -687,7 +542,6 @@ def main(): if len(siterecords) > 0: dbsite.record = siterecords[0] [stalat, stalon, staname] = dbsite.getv("lat", "lon", "staname") - #staname = staname.decode(db_encoding) staname = staname.replace("'", "\\'") stastr = [] stastr.append("%s - %s" % (sta, staname)) @@ -821,12 +675,10 @@ def main(): icon_name = "xI" icons.append(icon_template % (icon_name, unassoc_icon)) this_layer = "xxx" - # layer_names.append( this_layer ) layers.append(layer_template % this_layer) layer_descriptions.append('"unused Stations": %s' % this_layer) for dbsite.record in range(n_sites_left): [sta, stalat, stalon, staname] = dbsite.getv("sta", "lat", "lon", "staname") - #staname = staname.decode(db_encoding) staname = staname.replace("'", "\\'") marker_html = "%s - %s
%.2f %.2f" % (sta, staname, stalat, stalon) markers.append( @@ -834,32 +686,41 @@ def main(): % (stalat, stalon, "xI", "%s - %s" % (sta, staname), marker_html, "xxx") ) - htmlfilename = "%s.html" % filebase - file = codecs.open(htmlfilename, "w", "UTF-8") if id_is_orid: titlestring = "Origin %d" % orid else: titlestring = "Event %d - prefor %d" % (orid, preforid) - my_html = html_template % ( - titlestring, - "".join(icons), - "".join(layers), - "".join(markers), - plat, - plon, - ",".join(layer_names), - ",".join(layer_descriptions), - creation_time, + my_html = html_template.format( + title=title, + leaflet_css=leaflet_css, + leaflet_js=leaflet_js, + leaflet_draw_css=leaflet_draw_css, + leaflet_draw_js=leaflet_draw_js, + leaflet_measurecontrol_css=leaflet_measurecontrol_css, + leaflet_measurecontrol_js=leaflet_measurecontrol_js, + icons="".join(icons), + layers="".join(layers), + markers="".join(markers), + layer_names=",".join(layer_names), + layer_descriptions=",".join(layer_descriptions), + center_lat=plat, + center_lon=plon, + logo_url=logo_url, + logo_alt=logo_alt, + creation_time=creation_time, ) - file.write("".join(my_html)) + htmlfilename = "%s.html" % filebase + with open(htmlfilename, "w", encoding="utf8") as myfile: + myfile.write(my_html) + if verbose: elog.log("file done %s" % htmlfilename) - htmlfilename = "%s_evinfo.html" % filebase - file = codecs.open(htmlfilename, "w", "UTF-8") my_html = info_template % (titlestring, "".join(outstr), "".join(mag_str)) - file.write("".join(my_html)) + htmlfilename = "%s_evinfo.html" % filebase + with open(htmlfilename, "w", encoding="utf8") as myfile: + myfile.write(my_html) if verbose: elog.log("file done %s" % htmlfilename) return 0 diff --git a/bin/export/leaflet_js/web_stamap.1 b/bin/export/leaflet_js/web_stamap.1 index fbf4c07b7..af0e7e932 100644 --- a/bin/export/leaflet_js/web_stamap.1 +++ b/bin/export/leaflet_js/web_stamap.1 @@ -8,45 +8,81 @@ web_stamap [-v] [-p pffile] dbname .SH DESCRIPTION This utility 
produces a webpage for an interactive display of stations in a database. The stations can be grouped using search expressions in the paramter file. -The display relys on the \fIleaflet\fP javascript library written by Vladimir Agafonkin. +The display relies on the \fIleaflet\fP javascript library written by Vladimir Agafonkin. .SH OPTIONS .IP "-v" verbose flag .IP "-p pffile" parameter file +.IP "-f file" +Output filename. This overrides the filebase from the parameter file. .IP "dbname" Name of database. Only the site table is needed here. .SH PARAMETER FILE -The following is a sample parameter file. Some of the parameters +The following is a sample parameter file. Most of the parameters should be fairly self-explanatory. +.in 2c +.ft CW .nf -filebase stations -title OÖW Station Map +filebase stations # output filename +title Antelope Demo Station Map # title -logo_url https://geoweb.zamg.ac.at/images/zamg_logo_vert.png -logo_alt ZAMG Logo -web_css https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.css -web_js https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.js -dontshow S[123456]TA|BSTA|CUVW|CXYZ|CO[ABCD]|VB[1-3]|AAA[0-9].* +dontshow X[123456]TA|BSTA|CUVW|CXYZ|CO[ABCD] # ignore sites matching this expression layers &Tbl{ - &Arr{ - stations BZN|CRY|FRD|HSSP|SETM|SND|TONN|TRAN|WMC|B081|B082|B082A|B084|B086|B086A|B087|B088|B088A|B093|B946|ALCY|GVAR1|SJR|PSPR|TFRD - icon https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/images/marker-icon.png - description Anza Stations - } - &Arr{ - stations - icon https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/images/marker-icon.png - description Other Stations - } + &Arr{ + stations B0.* + icon http://geoweb.zamg.ac.at/my_icons/tri_26_pink.png + description Some array in the desert + } + &Arr{ + stations PFO + icon http://geoweb.zamg.ac.at/my_icons/tri_26_pink.png + description Pinion Flat + } + &Arr{ + stations # catch all stations left here + icon 
https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/images/marker-icon.png + description ANZA stations + } } -.fi +logo_url https://geoweb.zamg.ac.at/images/zamg_logo_vert.png +logo_alt ZAMG Logo + +leaflet_js https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.js +leaflet_css https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.css +leaflet_draw_js https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.js +leaflet_draw_css https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.css +leaflet_measurecontrol_js https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.js +leaflet_measurecontrol_css https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.css + +html_template ... +.fi +.ft R +.in +.IP filebase +Output filename without extension (".html"). This can be overriden on the commandline +.IP dontshow +Filter out stations +.IP layers +Different groups of stations selecte by the parameter \fIstations\fP. +Each group should be given a different icon. You may use the icons specified here, +but it would be kind to provide locally hosted copies. +The last groups with an empty parameter \fIstations\fP contains all stations left after excluding stations +either selected by the paramter \fIdontshow\fP or in one of the previous groups. +.IP "logo_url, logo_alt" +Replace with your own logo +.IP "leaflet_..." +Javascript libraries used here. You may want to provide locally hosted versions of these libraries +to increase stability and reliability. The plugins \fBdraw\fP and \fBmeasurecontrol\fP are needed for the interactive ruler taht can be activated by clicking on the icon below the zoom control ("+/-") on the left side of the webpage. See https://github.com/makinacorpus/Leaflet.MeasureControl for more information. +.IP html_template +Template for the new webpage. See https://leaflet-extras.github.io/leaflet-providers/preview/ for a list of alternative map layers. 
+All curly brackets must be doubled because of the antelope parameter file quoting conventions. See \fIpf(5)\fP for an explanation. .SH EXAMPLE .nf % web_stamap /opt/antelope/data/db/demo/demo @@ -54,11 +90,14 @@ layers &Tbl{ .fi .SH "BUGS AND CAVEATS" -The default parameter files is using icons and java-script libraries provided by the cloudflare CDN. For productive usage, I would recommend to provide locally hosted copies. +The default parameter file is using icons and java-script libraries and stylesheets provided by the cloudflare CDN, github and ZAMG. For productive usage, I would recommend to provide locally hosted copies. .SH "SEE ALSO" .nf -antelope(1), rtexec(1), \fBhttps://leafletjs.com/\fP +\fBhttps://leafletjs.com/\fP, +\fBhttps://leaflet-extras.github.io/leaflet-providers/preview/\fP, +\fBhttps://github.com/makinacorpus/Leaflet.MeasureControl\fP and +pf(5) .fi .SH AUTHOR .nf diff --git a/bin/export/leaflet_js/web_stamap.pf b/bin/export/leaflet_js/web_stamap.pf index ffe75b885..3bbb16b13 100644 --- a/bin/export/leaflet_js/web_stamap.pf +++ b/bin/export/leaflet_js/web_stamap.pf @@ -1,25 +1,144 @@ -filebase stations -title OÖW Station Map - -logo_url https://geoweb.zamg.ac.at/images/zamg_logo_vert.png -logo_alt ZAMG Logo -leaflet_css https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.css -leaflet_js https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.js -dontshow S[123456]TA|BSTA|CUVW|CXYZ|CO[ABCD]|VB[1-3]|AAA[0-9].* +filebase stations # output filename +title Antelope Demo Station Map # title + +dontshow X[123456]TA|BSTA|CUVW|CXYZ|CO[ABCD] # ignore sites matching this expression layers &Tbl{ - &Arr{ - stations ABNA|ABTA|ARSA|BIOA|CONA|CSNA|DAVA|FETA|KBA|KMR|LESA|MOA|MOTA|MYKA|OBKA|RETA|RONA|SOKA|SESA|SQTA|VIE|WATA|WINA|WTTA|UNNA - icon https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/images/marker-icon.png - description Austrian Broadband ( Q330 + STS-2/STS-2.5 ) - } - &Arr{ - stations 
OBSA|RKSA|RSNA|RWNA|KMWA|ADSA|ZETA|UMWA|FRTA|NATA|LFVA|WOTA|PROA|LMSA|WIWA|GUKA|KEKA|KMKA|BITA|SNWA|ROSA|BSTA|SOSA - icon https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/images/marker-icon.png - description Austrian Realtime Strong Motion Stations (Basalt or Etna2) - } - &Arr{ - stations - icon https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/images/marker-icon.png - description Foreign Stations - } + &Arr{ + stations B0.* + icon http://geoweb.zamg.ac.at/my_icons/tri_26_pink.png + description Some array in the desert + } + &Arr{ + stations PFO + icon http://geoweb.zamg.ac.at/my_icons/tri_26_pink.png + description Pinion Flat + } + &Arr{ + stations # catch all stations left here + icon https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/images/marker-icon.png + description ANZA stations + } +} + +logo_url https://geoweb.zamg.ac.at/images/zamg_logo_vert.png +logo_alt ZAMG Logo +leaflet_js https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.js +leaflet_css https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.7.1/leaflet.min.css +leaflet_draw_js https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.js +leaflet_draw_css https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.css +leaflet_measurecontrol_js https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.js +leaflet_measurecontrol_css https://makinacorpus.github.io/Leaflet.MeasureControl/leaflet.measurecontrol.css + + +# Note: we don't need to escape percent signs, but +# we need double curly brackets for a single on in the output, while +# key-fields must be embracketed only once +html_template &Literal{ + + + + {title} + + + + + + + + + + + +
+ + + + } diff --git a/bin/export/leaflet_js/web_stamap.xpy b/bin/export/leaflet_js/web_stamap.xpy index 1e05c10ef..521b3a064 100755 --- a/bin/export/leaflet_js/web_stamap.xpy +++ b/bin/export/leaflet_js/web_stamap.xpy @@ -20,112 +20,6 @@ marker_template = ( icon_template = """var %s = new LeafIcon({iconUrl: '%s'});""" marker_html_template = "%s - %s

%.2f %.2f %.0fm

%s" -# note we need to escape percents %-> %% -html_template = """ - - - - %s - - - - - - - - - - - -

- - - -""" - import getopt # Import Antelope modules @@ -184,9 +78,14 @@ def main(): logo_alt = pf["logo_alt"] leaflet_css = pf["leaflet_css"] leaflet_js = pf["leaflet_js"] + leaflet_draw_css = pf["leaflet_draw_css"] + leaflet_draw_js = pf["leaflet_draw_js"] + leaflet_measurecontrol_css = pf["leaflet_measurecontrol_css"] + leaflet_measurecontrol_js = pf["leaflet_measurecontrol_js"] dontshow = pf["dontshow"] title = pf["title"] gis_layers = pf["layers"] + html_template = pf["html_template"] db = ds.dbopen(dbname, "r") dbsite = db.lookup(table="site") @@ -226,7 +125,11 @@ def main(): layer_index += 1 for dbs.record in range(dbs.record_count): [ondate, lat, lon, elev, sta] = dbs.getv( - "ondate", "lat", "lon", "elev", "sta", + "ondate", + "lat", + "lon", + "elev", + "sta", ) try: [staname] = dbs.getv("staname") @@ -247,26 +150,31 @@ def main(): marker_template % (lat, lon, icon_name, sta, marker_html, this_layer) ) - my_html = html_template % ( - title, - leaflet_css, - leaflet_js, - "".join(icons), - "".join(layers), - "".join(markers), - center_lon, center_lat, - ",".join(layer_names), - ",".join(layer_descriptions), - logo_url, - logo_alt, - creation_time, + my_html = html_template.format( + title=title, + leaflet_css=leaflet_css, + leaflet_js=leaflet_js, + leaflet_draw_css=leaflet_draw_css, + leaflet_draw_js=leaflet_draw_js, + leaflet_measurecontrol_css=leaflet_measurecontrol_css, + leaflet_measurecontrol_js=leaflet_measurecontrol_js, + icons="".join(icons), + layers="".join(layers), + layer_names=",".join(layer_names), + layer_descriptions=",".join(layer_descriptions), + markers="".join(markers), + center_lat=center_lat, + center_lon=center_lon, + logo_url=logo_url, + logo_alt=logo_alt, + creation_time=creation_time, ) - outstr = "".join(my_html) with open(htmlfilename, "w", encoding="utf8") as myfile: - myfile.write(outstr) + myfile.write(my_html) return 0 + if __name__ == "__main__": status = main() sys.exit(status) From ee7aca1c26801211882d7e2d5d41d466eab745cb 
Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 09:16:25 +0200 Subject: [PATCH 24/60] add license --- lib/libpolygon/LICENSE | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 lib/libpolygon/LICENSE diff --git a/lib/libpolygon/LICENSE b/lib/libpolygon/LICENSE new file mode 100644 index 000000000..98eb59478 --- /dev/null +++ b/lib/libpolygon/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2003 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ From ffeb6d3871a5fcc8456ec36aaa4369338adc4f80 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 09:20:58 +0200 Subject: [PATCH 25/60] add license --- lib/perl/perlpolygon/LICENSE | 39 ++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 lib/perl/perlpolygon/LICENSE diff --git a/lib/perl/perlpolygon/LICENSE b/lib/perl/perlpolygon/LICENSE new file mode 100644 index 000000000..98eb59478 --- /dev/null +++ b/lib/perl/perlpolygon/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2003 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + From a4a1b6589bdfceb3c4f3794e5fc77daf8bca6f42 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 09:30:38 +0200 Subject: [PATCH 26/60] add license --- bin/db/inwhichpolygons/LICENSE | 39 ++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 bin/db/inwhichpolygons/LICENSE diff --git a/bin/db/inwhichpolygons/LICENSE b/bin/db/inwhichpolygons/LICENSE new file mode 100644 index 000000000..98eb59478 --- /dev/null +++ b/bin/db/inwhichpolygons/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2003 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + From 03b977126a9f51452d67cd97e58e9dd5d7e6c248 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 09:33:45 +0200 Subject: [PATCH 27/60] no more blabla --- data/schemas/css3.0.ext/origqual | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/schemas/css3.0.ext/origqual b/data/schemas/css3.0.ext/origqual index da988ef89..163b61870 100644 --- a/data/schemas/css3.0.ext/origqual +++ b/data/schemas/css3.0.ext/origqual @@ -136,6 +136,6 @@ Relation origqual Foreign ( commid ) Description ( "origin quality idicators" ) Detail { - blabla parameters to assess origin quality + Parameters to assess origin quality } ; From d2fdcd37f9309baa72c0ecc39f16aa44eee37989 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 10:03:47 +0200 Subject: [PATCH 28/60] add license, less verbose, add hint on manpage --- bin/rt/dbnew2orb/LICENSE | 39 ++++++++++++++++++++++++++++++++++++ bin/rt/dbnew2orb/dbnew2orb.1 | 3 ++- bin/rt/dbnew2orb/dbnew2orb.c | 2 +- 3 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 bin/rt/dbnew2orb/LICENSE diff --git a/bin/rt/dbnew2orb/LICENSE b/bin/rt/dbnew2orb/LICENSE new file mode 100644 index 000000000..8c011b6dc --- /dev/null +++ 
b/bin/rt/dbnew2orb/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2005,2013 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + diff --git a/bin/rt/dbnew2orb/dbnew2orb.1 b/bin/rt/dbnew2orb/dbnew2orb.1 index 963ca217e..b628767bf 100644 --- a/bin/rt/dbnew2orb/dbnew2orb.1 +++ b/bin/rt/dbnew2orb/dbnew2orb.1 @@ -10,7 +10,7 @@ dbnew2orb \- send new or updated database rows to an orbserver .SH DESCRIPTION \fBdbnew2orb\fP watches a database for modifications and sends modified rows to an orbserver. 
It allows to transfer a database so that it can be used on the receiving side with programs -that keep the database open like e.g. dbevents. +that keep the database open like e.g. dbevents. The database can be in any database schema. .SH OPTIONS .IP "-sleep seconds" \fIseconds\fP specifies the number of \fIseconds\fP to wait between iterations over all database @@ -100,6 +100,7 @@ save the demo database on the other side: .SH "BUGS AND CAVEATS" The current version of the program transfers external files, i.e. those referenced by the fields dir and dfile \fIdfile\fP. This could result in large data volume transferred. +It can happen that orb2orb refuses to transfer database rows whose format is unknown on the instance transferring the data. This can be solved by installing database extensions on the transmitting machine as well, not only on the sending and receiving sides. Sometimes one might see strange log entries coming from orb2dbt on the receiving side. While this looks frigthening, it still seems to work fine. diff --git a/bin/rt/dbnew2orb/dbnew2orb.c b/bin/rt/dbnew2orb/dbnew2orb.c index cabbb029c..b3cd0f74f 100644 --- a/bin/rt/dbnew2orb/dbnew2orb.c +++ b/bin/rt/dbnew2orb/dbnew2orb.c @@ -327,7 +327,7 @@ main(int argc, char **argv) continue; } } - if (verbose) elog_notify(0,"will check table %s\n",tablename); + if (verbose > 1) elog_notify(0,"will check table %s\n",tablename); pushtbl(tablenames,tablename); } } From 28678bfbd8e92f303bbe53ee9f0f559d7db8d0cc Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 10:07:53 +0200 Subject: [PATCH 29/60] add license --- bin/utility/pfcp/LICENSE | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 bin/utility/pfcp/LICENSE diff --git a/bin/utility/pfcp/LICENSE b/bin/utility/pfcp/LICENSE new file mode 100644 index 000000000..98eb59478 --- /dev/null +++ b/bin/utility/pfcp/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2003 Nikolaus Horn +All rights reserved. 
+ +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ From e8fbdf50c98e3bcdfc6340eb58b44fecd3776686 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 10:12:03 +0200 Subject: [PATCH 30/60] add license --- bin/utility/reltime/LICENSE | 39 +++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 bin/utility/reltime/LICENSE diff --git a/bin/utility/reltime/LICENSE b/bin/utility/reltime/LICENSE new file mode 100644 index 000000000..3aed15749 --- /dev/null +++ b/bin/utility/reltime/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2005 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + From de48cdbc6bdc037c20aecf457d45d463c28197e2 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 10:18:36 +0200 Subject: [PATCH 31/60] add license, reference to original author --- data/python/polygon/LICENSE | 39 ++++++++++++++++++++++++++++ data/python/polygon/polygon_utils.3y | 1 + 2 files changed, 40 insertions(+) create mode 100644 data/python/polygon/LICENSE diff --git a/data/python/polygon/LICENSE b/data/python/polygon/LICENSE new file mode 100644 index 000000000..4c4e3808e --- /dev/null +++ b/data/python/polygon/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2014,2022 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + diff --git a/data/python/polygon/polygon_utils.3y b/data/python/polygon/polygon_utils.3y index f85f6c5a4..dfb803140 100644 --- a/data/python/polygon/polygon_utils.3y +++ b/data/python/polygon/polygon_utils.3y @@ -33,6 +33,7 @@ utilities to make life with polygon data easier return simplified version of polygon .SH "SEE ALSO" .nf +https://github.com/omarestrella/simplify.py antelope_python(3y), pythonpolygon(3y), polygon(3) .fi .SH "BUGS AND CAVEATS" From c0db63c54e2f15eca2ccaa7b1972387fe452f0dd Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 10:24:16 +0200 Subject: [PATCH 32/60] add license --- data/python/zamg_utilities/LICENSE | 39 ++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 data/python/zamg_utilities/LICENSE diff --git a/data/python/zamg_utilities/LICENSE b/data/python/zamg_utilities/LICENSE new file mode 100644 index 000000000..d2b68e96f --- /dev/null +++ b/data/python/zamg_utilities/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2018-2022 Nikolaus Horn +All rights reserved. 
+ +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ From b7a393facad0e3c6672ea274a85face3ca0628d1 Mon Sep 17 00:00:00 2001 From: Niko Horn Date: Mon, 11 Apr 2022 10:29:36 +0200 Subject: [PATCH 33/60] add license --- data/python/python_missing/LICENSE | 39 ++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 data/python/python_missing/LICENSE diff --git a/data/python/python_missing/LICENSE b/data/python/python_missing/LICENSE new file mode 100644 index 000000000..99141b68a --- /dev/null +++ b/data/python/python_missing/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2022 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of Nikolaus Horn nor +the names of its contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + From cee3a68c5db547c4ecf410dd19372705e0470204 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Fri, 29 Apr 2022 10:04:15 +0000 Subject: [PATCH 34/60] another license --- bin/import/ctbto/LICENSE | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 bin/import/ctbto/LICENSE diff --git a/bin/import/ctbto/LICENSE b/bin/import/ctbto/LICENSE new file mode 100644 index 000000000..f55bc3911 --- /dev/null +++ b/bin/import/ctbto/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2022 Nikolaus Horn +All rights reserved. + +This software is licensed under the New BSD license: + +Redistribution and use in source and binary forms, +with or without modification, are permitted provided +that the following conditions are met: + +* Redistributions of source code must retain the above +copyright notice, this list of conditions and the +following disclaimer. + +* Redistributions in binary form must reproduce the +above copyright notice, this list of conditions and +the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name Nikolaus Horn nor the names of +his contributors may be used to endorse +or promote products derived from this software without +specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + From f7feb2b58738f9fa056df5b2c5210982344aa0dc Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Wed, 4 May 2022 08:10:05 +0000 Subject: [PATCH 35/60] obvious typo in sensitivity --- data/instruments/sensors/ifs_3000.pf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/instruments/sensors/ifs_3000.pf b/data/instruments/sensors/ifs_3000.pf index a9d0edbb3..38c1e738b 100644 --- a/data/instruments/sensors/ifs_3000.pf +++ b/data/instruments/sensors/ifs_3000.pf @@ -12,7 +12,7 @@ band l # broadband sensor_type_code D gtype sensor -sensitivity 0.1725 # (17.25 mv / Pa) +sensitivity 0.01725 # (17.25 mv / Pa) iunits Pa ounits V From 2160ab2af907491281c8f360735c535ab0f72344 Mon Sep 17 00:00:00 2001 From: Nikolaus Horn Date: Mon, 16 May 2022 13:35:26 +0000 Subject: [PATCH 36/60] more checks, except for Attribute Dbptr, correctly identify undefined Attributes --- bin/utility/schemastuff/.gitignore | 1 + bin/utility/schemastuff/Makefile | 2 +- bin/utility/schemastuff/check_schema.xpy | 547 +++++++++++++++++++++++ 3 files changed, 549 insertions(+), 1 deletion(-) create mode 100644 bin/utility/schemastuff/check_schema.xpy diff --git a/bin/utility/schemastuff/.gitignore 
b/bin/utility/schemastuff/.gitignore index de3645a1c..00ad9fba3 100644 --- a/bin/utility/schemastuff/.gitignore +++ b/bin/utility/schemastuff/.gitignore @@ -1 +1,2 @@ schema2tables +check_schema diff --git a/bin/utility/schemastuff/Makefile b/bin/utility/schemastuff/Makefile index f816214ba..ca6bff6b6 100644 --- a/bin/utility/schemastuff/Makefile +++ b/bin/utility/schemastuff/Makefile @@ -1,7 +1,7 @@ DATADIR=awk MAN1=schema2tables.1 splitschema.awk.1 DATA=splitschema.awk -BIN=schema2tables +BIN=schema2tables check_schema SUBDIR=/contrib include $(ANTELOPEMAKE) diff --git a/bin/utility/schemastuff/check_schema.xpy b/bin/utility/schemastuff/check_schema.xpy new file mode 100644 index 000000000..27a2b00ce --- /dev/null +++ b/bin/utility/schemastuff/check_schema.xpy @@ -0,0 +1,547 @@ +""" +check schema definitions for inconsitencies + +@author Nikolaus Horn