diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 25d39bd68..86f24decc 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -66,19 +66,10 @@ jobs: - name: java build -- raven run: cd raven && ../gradlew --info clean build javadoc checkstyleMain - - - name: java build -- ringhold - run: cd ringhold && ../gradlew --info clean build javadoc checkstyleMain - name: java build -- ratik run: cd ratik && ../gradlew --info clean build javadoc checkstyleMain -## TODO: docker build depends on cadc-tomcat base image from docker-base.git -# - name: docker build -- baldur -# run: cd baldur && docker build . --file Dockerfile --tag baldur:$(date +%s) -# - name: docker build -- minoc -# run: cd minoc && docker build . --file Dockerfile --tag minoc:$(date +%s) -# - name: docker build -- luskan -# run: cd luskan && docker build . --file Dockerfile --tag luskan:$(date +%s) -# - name: docker build -- raven -# run: cd raven && docker build . --file Dockerfile --tag raven:$(date +%s) + - name: java build -- ringhold + run: cd ringhold && ../gradlew --info clean build javadoc checkstyleMain + diff --git a/ChangeLog.md b/ChangeLog.md new file mode 100644 index 000000000..77d824e33 --- /dev/null +++ b/ChangeLog.md @@ -0,0 +1,70 @@ +# Change Log + +This is a cursory summary of changes to various storage-inventory components. +Check the README in specific modules for details. + +## minoc:1.0.0 +``` +added optional `org.opencadc.minoc.trust.preauth` config +removed optional `org.opencadc.minoc.publicKeyFile` config +``` +A `minoc` instance will download a public key from each trusted service and +use the public key(s) to validate URLs that include a _preauth_ token. + +``` +added optional `org.opencadc.minoc.readable` and `org.opencadc.minoc.writable` config +``` +A `minoc` service will advertise (via the inventory.StorageSite record in the database) the +_readable_ and _writable_ status; this information is synced to global inventory and +used by `raven` to determine if it should generate PUT or GET URLs that use the `minoc` +service(s) at that site. The configuration of _readGrantProvider_(s) and +_writeGrantProvider_(s) implicitly determines the status (_readable_ and _writable_ +respectively); configuration of any _trust.preauth_ will also implicitly make the +status _readable_ and _writable_. + +The explicit _readable_ and _writable_ configuration options will override the above +implicit logic and set the status accordingly. This is currently optional but may be required +in a future version. + +``` +added optional config file: cadc-log.properties +added optional config file: cadc-vosi.properties +``` + +## raven:1.0.0 +``` +added org.opencadc.raven.inventory connection pool +``` +A `raven` service uses this pool to perform database initialization. This pool is +configured in the `catalina.properties` file. + +``` +added optional `org.opencadc.raven.keys.preauth` config +removed optional `org.opencadc.raven.publicKeyFile` and `org.opencadc.minoc.privateKeyFile` config +``` +When configured to do so, a `raven` service will generate its own public/private key pair +and use the private key to _sign_ `minoc` URLs. All the `minoc` services known to the global +`raven` service must also be configured to _trust_ `raven`.
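To make the _preauth_ trust configuration described above more concrete, here is a minimal, hypothetical properties sketch. The property keys are the ones named in this change log; the values (a trusted `raven` resourceID and boolean flags) are illustrative assumptions only, so check the `minoc` and `raven` module READMEs for the authoritative syntax.
```
# hypothetical minoc.properties fragment (values are examples, not defaults)
org.opencadc.minoc.trust.preauth = ivo://example.org/raven
org.opencadc.minoc.readable = true
org.opencadc.minoc.writable = true

# hypothetical raven.properties fragment: enable generation of the signing key pair
org.opencadc.raven.keys.preauth = true
```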
+ +``` +added optional config file: cadc-log.properties +added optional config file: cadc-vosi.properties +``` + +## luskan:1.0.0 +``` +changed config file: cadc-tap-tmp.properties +``` +A `luskan` service now uses the `DelegatingStorageManager` so this config file must +specify which storage manager implementation to use, along with existing +implementation-specific configuration options. + +``` +added optional config file: cadc-log.properties +added optional config file: cadc-vosi.properties +``` + +## vault:0.5.0 (NEW) +This is a new service that implements the IVOA VOSpace 2.1 REST API to provide user-managed +hierarchical storage. `vault` is deployed with its own database and an associated inventory +database, and uses inventory services (`minoc`) for file storage and management. diff --git a/README.md b/README.md index 2838d9b05..1b2fbf4da 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,10 @@ local artifact selection policy. This is used if the new fenwick policy excludes This is an implementation of the **file-validate** process that compares the inventory database against the back end storage at a storage site. +## vault +UNDER DEVELOPMENT: This is an implementation of an IVOA VOSpace +service that uses storage-inventory as the back end storage mechanism. + ## cadc-* These are libraries used in multiple services and applications. diff --git a/baldur/README.md b/baldur/README.md index 641ee0425..93a8b26fa 100644 --- a/baldur/README.md +++ b/baldur/README.md @@ -112,6 +112,9 @@ When more than one entry matches an artifact URI, the grants are combined as follows: * the members of any of the groups in all matching readOnlyGroup lists are allowed to read * the members of any of the groups in all matching readWriteGroup lists are allowed to read and write +### cadc-log.properties (optional) +See the cadc-log library for common +dynamic logging control.
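As a rough guide to the `cadc-log.properties` file mentioned above, the sketch below shows the general shape of such a file, assuming the usual cadc-log convention of listing identities that are authorized to adjust log levels at runtime; the property names and values here are assumptions for illustration, so consult the cadc-log module for the authoritative format.
```
# hypothetical cadc-log.properties (property names and values are illustrative assumptions)
user = cn=ops,ou=example,o=org,c=ca
group = ivo://example.org/gms?Operators
```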
## integration testing diff --git a/cadc-inventory-db/build.gradle b/cadc-inventory-db/build.gradle index f01493ba1..91e5e51d8 100644 --- a/cadc-inventory-db/build.gradle +++ b/cadc-inventory-db/build.gradle @@ -17,7 +17,7 @@ sourceCompatibility = 1.8 group = 'org.opencadc' -version = '0.14.6' +version = '1.0.0' description = 'OpenCADC Storage Inventory database library' def git_url = 'https://github.com/opencadc/storage-inventory' @@ -25,8 +25,10 @@ def git_url = 'https://github.com/opencadc/storage-inventory' mainClassName = 'org.opencadc.inventory.db.version.Main' dependencies { - compile 'org.opencadc:cadc-util:[1.9.5,2.0)' - compile 'org.opencadc:cadc-inventory:[0.9.4,)' + compile 'org.opencadc:cadc-util:[1.11.0,2.0)' + compile 'org.opencadc:cadc-gms:[1.0.0,)' + compile 'org.opencadc:cadc-inventory:[1.0.0,)' + compile 'org.opencadc:cadc-vos:[2.0.6,3.0)' testCompile 'junit:junit:[4.0,)' diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ArtifactDAOTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ArtifactDAOTest.java index 6591d3af7..f3a225976 100644 --- a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ArtifactDAOTest.java +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ArtifactDAOTest.java @@ -94,10 +94,11 @@ import org.junit.Test; import org.opencadc.inventory.Artifact; import org.opencadc.inventory.InventoryUtil; +import org.opencadc.inventory.Namespace; import org.opencadc.inventory.SiteLocation; import org.opencadc.inventory.StorageLocation; import org.opencadc.inventory.StoredArtifactComparator; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; /** * @@ -126,22 +127,23 @@ public ArtifactDAOTest() throws Exception { config.put(SQLGenerator.class.getName(), SQLGenerator.class); config.put("jndiDataSourceName", "jdbc/ArtifactDAOTest"); config.put("database", TestUtil.DATABASE); - config.put("schema", TestUtil.SCHEMA); - + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.SCHEMA); + originDAO = new ArtifactDAO(); originDAO.setConfig(config); - + nonOriginDAO = new ArtifactDAO(false); nonOriginDAO.setConfig(config); - + DBUtil.createJNDIDataSource("jdbc/ArtifactDAOTest-alt", cc); Map altConfig = new TreeMap(); altConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); altConfig.put("jndiDataSourceName", "jdbc/ArtifactDAOTest-alt"); altConfig.put("database", TestUtil.DATABASE); - altConfig.put("schema", TestUtil.SCHEMA); + altConfig.put("invSchema", TestUtil.SCHEMA); + altConfig.put("genSchema", TestUtil.SCHEMA); altDAO.setConfig(altConfig); - } catch (Exception ex) { log.error("setup failed", ex); throw ex; @@ -151,7 +153,7 @@ public ArtifactDAOTest() throws Exception { @Before public void init_cleanup() throws Exception { log.info("init database..."); - InitDatabase init = new InitDatabase(originDAO.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); + InitDatabaseSI init = new InitDatabaseSI(originDAO.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); init.doInit(); log.info("init database... 
OK"); @@ -954,6 +956,9 @@ public void testIteratorClose() { public void testArtifactIterator() { int num = 10; try { + final Date startDate = new Date(); + Thread.sleep(10L); + int numArtifacts = 0; int numStuffExpected = 0; // artifacts with storageLocation @@ -976,6 +981,7 @@ public void testArtifactIterator() { numStuffExpected++; } } + // some artifacts with no storageLocation collection = "STUFF"; for (int i = num; i < 2 * num; i++) { @@ -995,6 +1001,12 @@ public void testArtifactIterator() { numStuffExpected++; } } + + Thread.sleep(10L); + final Date midDate = new Date(); + Thread.sleep(10L); + final int midNumStuff = numStuffExpected; + // some artifacts with siteLocations UUID siteID = UUID.randomUUID(); int numSiteExpected = 0; @@ -1017,6 +1029,8 @@ public void testArtifactIterator() { numStuffExpected++; } } + Thread.sleep(10L); + final Date endDate = new Date(); log.info("added: " + numArtifacts); log.info("count all..."); @@ -1031,13 +1045,14 @@ public void testArtifactIterator() { Assert.assertEquals("count", numArtifacts, count); log.info("count with criteria..."); + final Namespace ns = new Namespace("cadc:STUFF/"); count = 0; - try (ResourceIterator iter = originDAO.iterator("uri like 'cadc:STUFF/%'", null, false)) { + try (ResourceIterator iter = originDAO.iterator(ns, null, false)) { while (iter.hasNext()) { Artifact actual = iter.next(); count++; log.info("found: " + actual.getURI()); - Assert.assertTrue("STUFF", actual.getURI().toASCIIString().startsWith("cadc:STUFF/")); + Assert.assertTrue("STUFF", actual.getURI().toASCIIString().startsWith(ns.getNamespace())); } } Assert.assertEquals("count", numStuffExpected, count); @@ -1064,7 +1079,8 @@ public void testArtifactIterator() { while (iter.hasNext()) { Artifact actual = iter.next(); count++; - log.info("found: " + actual.getURI()); + log.info("found: " + actual.getBucket() + " " + actual.getURI()); + Assert.assertTrue(actual.getBucket().startsWith(bpre)); } } } @@ -1074,18 +1090,56 @@ public void testArtifactIterator() { count = 0; for (byte b = 0; b < 16; b++) { String bpre = HexUtil.toHex(b).substring(1); - log.debug("bucket prefix: " + bpre); - try (ResourceIterator iter = originDAO.iterator("uri like 'cadc:STUFF/%'", bpre, false)) { + log.info("bucket prefix: " + bpre); + try (ResourceIterator iter = originDAO.iterator(ns, bpre, false)) { while (iter.hasNext()) { Artifact actual = iter.next(); count++; - log.info("found: " + actual.getURI()); - Assert.assertTrue("STUFF", actual.getURI().toASCIIString().startsWith("cadc:STUFF/")); + log.info("found: " + actual.getBucket() + " " + actual.getURI()); + Assert.assertTrue(actual.getBucket().startsWith(bpre)); + Assert.assertTrue("STUFF", actual.getURI().toASCIIString().startsWith(ns.getNamespace())); } } } Assert.assertEquals("count", numStuffExpected, count); + log.info("count vs Namespace incremental from start..."); + DateFormat df = DateUtil.getDateFormat(DateUtil.IVOA_DATE_FORMAT, DateUtil.UTC); + count = 0; + try (ResourceIterator iter = originDAO.iterator(ns, null, startDate, true)) { + while (iter.hasNext()) { + Artifact actual = iter.next(); + count++; + log.info("found: " + actual.getBucket() + " " + actual.getURI() + " " + df.format(actual.getContentLastModified())); + Assert.assertTrue("STUFF", actual.getURI().toASCIIString().startsWith(ns.getNamespace())); + } + } + Assert.assertEquals("count", numStuffExpected, count); + + log.info("count vs Namespace incremental from mid..."); + count = 0; + try (ResourceIterator iter = originDAO.iterator(ns, null, midDate, 
true)) { + while (iter.hasNext()) { + Artifact actual = iter.next(); + count++; + log.info("found: " + actual.getBucket() + " " + actual.getURI() + " " + df.format(actual.getContentLastModified())); + Assert.assertTrue("STUFF", actual.getURI().toASCIIString().startsWith(ns.getNamespace())); + } + } + Assert.assertEquals("count", midNumStuff, count); + + log.info("count vs Namespace incremental from end..."); + count = 0; + try (ResourceIterator iter = originDAO.iterator(ns, null, endDate, true)) { + while (iter.hasNext()) { + Artifact actual = iter.next(); + count++; + log.info("found: " + actual.getBucket() + " " + actual.getURI() + " " + df.format(actual.getContentLastModified())); + Assert.assertTrue("STUFF", actual.getURI().toASCIIString().startsWith(ns.getNamespace())); + } + } + Assert.assertEquals("count", 0, count); + } catch (Exception unexpected) { log.error("unexpected exception", unexpected); Assert.fail("unexpected exception: " + unexpected); diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/EntityEventDAOTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/EntityEventDAOTest.java index f6e40cf68..2dbba4be6 100644 --- a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/EntityEventDAOTest.java +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/EntityEventDAOTest.java @@ -87,7 +87,7 @@ import org.opencadc.inventory.DeletedStorageLocationEvent; import org.opencadc.inventory.InventoryUtil; import org.opencadc.inventory.StorageLocationEvent; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; /** * @@ -106,25 +106,31 @@ public class EntityEventDAOTest { StorageLocationEventDAO slDAO = new StorageLocationEventDAO(); public EntityEventDAOTest() throws Exception { - DBConfig dbrc = new DBConfig(); - ConnectionConfig cc = dbrc.getConnectionConfig(TestUtil.SERVER, TestUtil.DATABASE); - DBUtil.createJNDIDataSource("jdbc/EntityEventDAOTest", cc); - - Map config = new TreeMap(); - config.put(SQLGenerator.class.getName(), SQLGenerator.class); - config.put("jndiDataSourceName", "jdbc/EntityEventDAOTest"); - config.put("database", TestUtil.DATABASE); - config.put("schema", TestUtil.SCHEMA); - daeDAO.setConfig(config); - dslDAO.setConfig(config); - slDAO.setConfig(config); + try { + DBConfig dbrc = new DBConfig(); + ConnectionConfig cc = dbrc.getConnectionConfig(TestUtil.SERVER, TestUtil.DATABASE); + DBUtil.createJNDIDataSource("jdbc/EntityEventDAOTest", cc); + + Map config = new TreeMap(); + config.put(SQLGenerator.class.getName(), SQLGenerator.class); + config.put("jndiDataSourceName", "jdbc/EntityEventDAOTest"); + config.put("database", TestUtil.DATABASE); + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.SCHEMA); + daeDAO.setConfig(config); + dslDAO.setConfig(config); + slDAO.setConfig(config); + } catch (Exception ex) { + log.error("setup failed", ex); + throw ex; + } } @Before public void setup() throws Exception { log.info("init database..."); - InitDatabase init = new InitDatabase(daeDAO.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); + InitDatabaseSI init = new InitDatabaseSI(daeDAO.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); init.doInit(); log.info("init database... 
OK"); diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/HarvestStateDAOTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/HarvestStateDAOTest.java index 7802893c8..e7a78cbc3 100644 --- a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/HarvestStateDAOTest.java +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/HarvestStateDAOTest.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2022. (c) 2022. +* (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -84,7 +84,7 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; /** * @@ -112,7 +112,8 @@ public HarvestStateDAOTest() throws Exception { config.put(SQLGenerator.class.getName(), SQLGenerator.class); config.put("jndiDataSourceName", "jdbc/HarvestStateDAOTest"); config.put("database", TestUtil.DATABASE); - config.put("schema", TestUtil.SCHEMA); + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.SCHEMA); dao.setConfig(config); } catch (Exception ex) { log.error("setup failed", ex); @@ -123,7 +124,7 @@ public HarvestStateDAOTest() throws Exception { @Before public void setup() throws Exception { log.info("init database..."); - InitDatabase init = new InitDatabase(dao.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); + InitDatabaseSI init = new InitDatabaseSI(dao.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); init.doInit(); log.info("init database... OK"); @@ -136,6 +137,11 @@ public void setup() throws Exception { log.info("clearing old content... 
OK"); } + @Test + public void noop() { + log.info("no-op - just setup"); + } + @Test public void testPutGetUpdateDelete() { try { @@ -172,6 +178,7 @@ public void testPutGetUpdateDelete() { // update hs1.curLastModified = new Date(); hs1.curID = UUID.randomUUID(); + hs1.instanceID = UUID.randomUUID(); dao.put(hs1); //log.warn("SLEEPING for lock diagnostics: 20 sec"); @@ -181,12 +188,25 @@ public void testPutGetUpdateDelete() { HarvestState hs2 = dao.get(hs1.getID()); log.info("found: " + hs2); - Assert.assertNotNull("find by uuid", hs1); + Assert.assertNotNull("find by uuid", hs2); Assert.assertNotEquals(expected.getLastModified(), hs2.getLastModified()); Assert.assertNotEquals(expected.getMetaChecksum(), hs2.getMetaChecksum()); Assert.assertEquals("round trip metachecksum", hs1.getMetaChecksum(), hs2.getMetaChecksum()); Assert.assertEquals("curLastModified", hs1.curLastModified.getTime(), hs2.curLastModified.getTime()); Assert.assertEquals("curID", hs1.curID, hs2.curID); + Assert.assertEquals("instanceID", hs1.instanceID, hs2.instanceID); + + // force update with no state change + final Date prevT = hs2.getLastModified(); + Thread.sleep(100L); + dao.put(hs2, true); + + HarvestState tup = dao.get(hs2.getID()); + Assert.assertNotNull(tup); + URI tupCS = tup.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("meta: no change", tup.getMetaChecksum(), tupCS); + Assert.assertTrue("timestamp: update", tup.getLastModified().after(prevT)); + log.info("force update OK: " + tup); // clear tracking state hs1.curLastModified = null; diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ObsoleteStorageLocationDAOTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ObsoleteStorageLocationDAOTest.java index c5d818b11..7f1c4c229 100644 --- a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ObsoleteStorageLocationDAOTest.java +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/ObsoleteStorageLocationDAOTest.java @@ -67,7 +67,6 @@ package org.opencadc.inventory.db; -import org.opencadc.inventory.ObsoleteStorageLocation; import ca.nrc.cadc.db.ConnectionConfig; import ca.nrc.cadc.db.DBConfig; import ca.nrc.cadc.db.DBUtil; @@ -83,8 +82,9 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.opencadc.inventory.ObsoleteStorageLocation; import org.opencadc.inventory.StorageLocation; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; /** * @@ -110,7 +110,8 @@ public ObsoleteStorageLocationDAOTest() throws Exception { config.put(SQLGenerator.class.getName(), SQLGenerator.class); config.put("jndiDataSourceName", "jdbc/ArtifactDAOTest"); config.put("database", TestUtil.DATABASE); - config.put("schema", TestUtil.SCHEMA); + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.SCHEMA); dao.setConfig(config); } catch (Exception ex) { log.error("setup failed", ex); @@ -123,7 +124,7 @@ public void setup() throws Exception { log.info("init database..."); - InitDatabase init = new InitDatabase(dao.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); + InitDatabaseSI init = new InitDatabaseSI(dao.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); init.doInit(); log.info("init database... 
OK"); diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/PreauthKeyPairDAOTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/PreauthKeyPairDAOTest.java new file mode 100644 index 000000000..2f6e86180 --- /dev/null +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/PreauthKeyPairDAOTest.java @@ -0,0 +1,203 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.inventory.db; + +import ca.nrc.cadc.db.ConnectionConfig; +import ca.nrc.cadc.db.DBConfig; +import ca.nrc.cadc.db.DBUtil; +import ca.nrc.cadc.util.Log4jInit; +import ca.nrc.cadc.util.RsaSignatureGenerator; +import java.net.URI; +import java.security.KeyPair; +import java.security.MessageDigest; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import javax.sql.DataSource; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.opencadc.inventory.PreauthKeyPair; +import org.opencadc.inventory.db.version.InitDatabaseSI; + +/** + * + * @author pdowler + */ +public class PreauthKeyPairDAOTest { + private static final Logger log = Logger.getLogger(PreauthKeyPairDAOTest.class); + + static { + Log4jInit.setLevel("org.opencadc.inventory", Level.DEBUG); + Log4jInit.setLevel("ca.nrc.cadc.db.version", Level.DEBUG); + } + + PreauthKeyPairDAO dao = new PreauthKeyPairDAO(); + + public PreauthKeyPairDAOTest()throws Exception { + DBConfig dbrc = new DBConfig(); + ConnectionConfig cc = dbrc.getConnectionConfig(TestUtil.SERVER, TestUtil.DATABASE); + DBUtil.createJNDIDataSource("jdbc/PreauthKeyPairDAOTest", cc); + + Map config = new TreeMap(); + config.put(SQLGenerator.class.getName(), SQLGenerator.class); + config.put("jndiDataSourceName", "jdbc/PreauthKeyPairDAOTest"); + config.put("database", TestUtil.DATABASE); + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.SCHEMA); + dao.setConfig(config); + } + + @Before + public void setup() + throws Exception + { + log.info("init database..."); + InitDatabaseSI init = new InitDatabaseSI(dao.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); + init.doInit(); + log.info("init database... OK"); + + log.info("clearing old content..."); + SQLGenerator gen = dao.getSQLGenerator(); + DataSource ds = dao.getDataSource(); + String sql = "delete from " + gen.getTable(PreauthKeyPair.class); + log.info("pre-test cleanup: " + sql); + ds.getConnection().createStatement().execute(sql); + log.info("clearing old content... 
OK"); + } + + @Test + public void testPutGetUpdateDelete() { + String name = "testPutGetUpdateDelete"; + KeyPair kp = RsaSignatureGenerator.getKeyPair(4096); + byte[] publicKey = kp.getPublic().getEncoded(); + byte[] privateKey = kp.getPrivate().getEncoded(); + log.info("generated keys (4096): " + publicKey.length + "," + privateKey.length); + try { + + PreauthKeyPair expected = new PreauthKeyPair(name, publicKey, privateKey); + + PreauthKeyPair notFound = dao.get(expected.getID()); + Assert.assertNull(notFound); + + dao.put(expected); + + // persistence assigns entity state before put + Assert.assertNotNull(expected.getLastModified()); + Assert.assertNotNull(expected.getMetaChecksum()); + URI mcs = expected.getMetaChecksum(); + + URI mcs0 = expected.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("put metachecksum", mcs, mcs0); + + // get by ID + PreauthKeyPair fid = dao.get(expected.getID()); + Assert.assertNotNull(fid); + Assert.assertEquals(expected.getName(), fid.getName()); + Assert.assertEquals(expected.getPublicKey().length, fid.getPublicKey().length); + Assert.assertEquals(expected.getPrivateKey().length, fid.getPrivateKey().length); + URI mcs1 = fid.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("round trip metachecksum", mcs, mcs1); + + // get by name + fid = dao.get(name); + Assert.assertNotNull(fid); + Assert.assertEquals(expected.getName(), fid.getName()); + Assert.assertEquals(expected.getPublicKey().length, fid.getPublicKey().length); + Assert.assertEquals(expected.getPrivateKey().length, fid.getPrivateKey().length); + URI mcs2 = fid.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("round trip metachecksum", mcs, mcs2); + + // TODO: udpate + + // list + Set keys = dao.list(); + Assert.assertNotNull(keys); + Assert.assertEquals(1, keys.size()); + Iterator iter = keys.iterator(); + Assert.assertTrue(iter.hasNext()); + PreauthKeyPair actual = iter.next(); + Assert.assertEquals(expected.getPublicKey().length, fid.getPublicKey().length); + Assert.assertEquals(expected.getPrivateKey().length, fid.getPrivateKey().length); + URI mcs3 = fid.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("round trip metachecksum", mcs, mcs3); + + // delete + dao.delete(expected.getID()); + PreauthKeyPair deleted = dao.get(expected.getID()); + Assert.assertNull(deleted); + + } catch (Exception unexpected) { + log.error("unexpected exception", unexpected); + Assert.fail("unexpected exception: " + unexpected); + } + } +} diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/StorageSiteDAOTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/StorageSiteDAOTest.java index 2c80b595a..ce679f1fc 100644 --- a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/StorageSiteDAOTest.java +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/StorageSiteDAOTest.java @@ -84,7 +84,7 @@ import org.junit.Test; import org.opencadc.inventory.InventoryUtil; import org.opencadc.inventory.StorageSite; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; /** * @@ -109,7 +109,8 @@ public StorageSiteDAOTest() throws Exception { config.put(SQLGenerator.class.getName(), SQLGenerator.class); config.put("jndiDataSourceName", "jdbc/StorageSiteDAOTest"); config.put("database", TestUtil.DATABASE); - config.put("schema", TestUtil.SCHEMA); + config.put("invSchema", TestUtil.SCHEMA); + 
config.put("genSchema", TestUtil.SCHEMA); dao.setConfig(config); } @@ -118,7 +119,7 @@ public void setup() throws Exception { log.info("init database..."); - InitDatabase init = new InitDatabase(dao.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); + InitDatabaseSI init = new InitDatabaseSI(dao.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); init.doInit(); log.info("init database... OK"); diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/TestUtil.java b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/TestUtil.java index 92ba24bcc..8ab42bf71 100644 --- a/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/TestUtil.java +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/inventory/db/TestUtil.java @@ -79,10 +79,11 @@ public class TestUtil { private static final Logger log = Logger.getLogger(TestUtil.class); - static String SERVER = "INVENTORY_TEST"; - static String DATABASE = "cadctest"; - static String SCHEMA = "inventory"; - static String TABLE_PREFIX = null; + public static String SERVER = "INVENTORY_TEST"; + public static String DATABASE = "cadctest"; + public static String SCHEMA = "inventory"; + public static String VOS_SCHEMA = "vospace"; + public static String TABLE_PREFIX = null; static { try { @@ -102,12 +103,17 @@ public class TestUtil { if (s != null) { SCHEMA = s.trim(); } + s = props.getProperty("vos_schema"); + if (s != null) { + VOS_SCHEMA = s.trim(); + } s = props.getProperty("tablePrefix"); if (s != null) { TABLE_PREFIX = s.trim(); } } - log.info("intTest database config: " + SERVER + " " + DATABASE + " " + SCHEMA + " " + TABLE_PREFIX); + log.info("intTest database config: " + SERVER + " " + DATABASE + " " + SCHEMA + " " + VOS_SCHEMA + + " tablePrefix=" + TABLE_PREFIX); } catch (Exception oops) { log.debug("failed to load/read optional db config", oops); } diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/vospace/db/DataNodeSizeWorkerTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/vospace/db/DataNodeSizeWorkerTest.java new file mode 100644 index 000000000..649800b31 --- /dev/null +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/vospace/db/DataNodeSizeWorkerTest.java @@ -0,0 +1,244 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. 
Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.vospace.db; + +import ca.nrc.cadc.db.ConnectionConfig; +import ca.nrc.cadc.db.DBConfig; +import ca.nrc.cadc.db.DBUtil; +import ca.nrc.cadc.util.Log4jInit; +import java.net.URI; +import java.sql.Connection; +import java.util.Date; +import java.util.Map; +import java.util.TreeMap; +import java.util.UUID; +import javax.sql.DataSource; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.opencadc.inventory.Artifact; +import org.opencadc.vospace.db.DataNodeSizeWorker; +import org.opencadc.inventory.Namespace; +import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.db.HarvestState; +import org.opencadc.inventory.db.HarvestStateDAO; +import org.opencadc.inventory.db.SQLGenerator; +import org.opencadc.inventory.db.TestUtil; +import org.opencadc.inventory.db.version.InitDatabaseSI; +import org.opencadc.vospace.ContainerNode; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.db.InitDatabaseVOS; +import org.opencadc.vospace.db.NodeDAO; + +/** + * + * @author adriand + */ +public class DataNodeSizeWorkerTest { + private static final Logger log = Logger.getLogger(DataNodeSizeWorkerTest.class); + + static { + Log4jInit.setLevel("org.opencadc.inventory", Level.INFO); + Log4jInit.setLevel("org.opencadc.inventory.db", Level.INFO); + Log4jInit.setLevel("ca.nrc.cadc.db", Level.INFO); + Log4jInit.setLevel("org.opencadc.vospace", Level.INFO); + Log4jInit.setLevel("org.opencadc.vospace.db", Level.INFO); + } + + HarvestStateDAO harvestStateDAO; + NodeDAO nodeDAO; + ArtifactDAO artifactDAO; + + + public DataNodeSizeWorkerTest() throws Exception { + DBConfig dbrc = new DBConfig(); + ConnectionConfig cc = dbrc.getConnectionConfig(TestUtil.SERVER, TestUtil.DATABASE); + DBUtil.PoolConfig pool = new DBUtil.PoolConfig(cc, 1, 6000L, "select 123"); + DBUtil.createJNDIDataSource("jdbc/ArtifactSyncWorkerTest-node", pool); + + Map config = new TreeMap<>(); + config.put(SQLGenerator.class.getName(), SQLGenerator.class); + config.put("jndiDataSourceName", "jdbc/ArtifactSyncWorkerTest-node"); + config.put("database", TestUtil.DATABASE); + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.SCHEMA); + config.put("vosSchema", TestUtil.VOS_SCHEMA); + + this.harvestStateDAO = new HarvestStateDAO(); + harvestStateDAO.setConfig(config); + this.nodeDAO = new NodeDAO(); + nodeDAO.setConfig(config); + + pool = new DBUtil.PoolConfig(cc, 1, 6000L, "select 123"); + DBUtil.createJNDIDataSource("jdbc/ArtifactSyncWorkerTest-artifact", pool); + + config.put("jndiDataSourceName", "jdbc/ArtifactSyncWorkerTest-artifact"); + + this.artifactDAO = new ArtifactDAO(); + artifactDAO.setConfig(config); + } + + @Before + public void init_cleanup() throws Exception { + log.info("init database..."); + InitDatabaseSI initSI = new InitDatabaseSI(artifactDAO.getDataSource(), TestUtil.DATABASE, TestUtil.SCHEMA); + initSI.doInit(); + log.info("init SI database... OK"); + InitDatabaseVOS initVOS = new InitDatabaseVOS(nodeDAO.getDataSource(), TestUtil.DATABASE, TestUtil.VOS_SCHEMA); + initVOS.doInit(); + log.info("init VOS database... 
OK"); + + log.info("clearing old content..."); + // src DB + SQLGenerator gen = artifactDAO.getSQLGenerator(); + DataSource ds = artifactDAO.getDataSource(); + String sql = "delete from " + gen.getTable(Artifact.class); + Connection con = ds.getConnection(); + con.createStatement().execute(sql); + con.close(); + + gen = harvestStateDAO.getSQLGenerator(); + ds = harvestStateDAO.getDataSource(); + sql = "delete from " + gen.getTable(HarvestState.class); + con = ds.getConnection(); + con.createStatement().execute(sql); + + gen = nodeDAO.getSQLGenerator(); + sql = "delete from " + gen.getTable(ContainerNode.class); + log.info("pre-test cleanup: " + sql); + con.createStatement().execute(sql); + con.close(); + + log.info("clearing old content... OK"); + } + + @Test + public void testSyncArtifact() throws Exception { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + // create the data node + Namespace siNamespace = new Namespace("myorg:VOS/"); + URI artifactURI = URI.create(siNamespace.getNamespace() + UUID.randomUUID()); + DataNode orig = new DataNode(UUID.randomUUID(), "data-test", artifactURI); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + orig.isPublic = true; + orig.isLocked = false; + nodeDAO.put(orig); + + // get-by-id + DataNode actual = (DataNode)nodeDAO.get(orig.getID()); + Assert.assertNotNull(actual); + log.info("found: " + actual.getID() + " aka " + actual); + Assert.assertNull(orig.bytesUsed); + + // create the corresponding artifact + Artifact expected = new Artifact( + artifactURI, + URI.create("md5:d41d8cd98f00b204e9800998ecf8427e"), + new Date(), + 666L); + log.info("expected: " + expected); + + artifactDAO.put(expected); + Artifact actualArtifact = artifactDAO.get(expected.getID()); + Assert.assertNotNull(actual); + Assert.assertEquals(expected.getContentLength(), actualArtifact.getContentLength()); + + String hsName = "ArtifactSize"; + URI resourceID = URI.create("ivo://myorg.org/vospace"); + HarvestState hs = new HarvestState(hsName, resourceID); + harvestStateDAO.put(hs); + hs = harvestStateDAO.get(hsName, resourceID); + + DataNodeSizeWorker asWorker = new DataNodeSizeWorker(harvestStateDAO, hs, artifactDAO, siNamespace); + asWorker.run(); + + actual = (DataNode)nodeDAO.get(orig.getID()); + Assert.assertNotNull(actual); + log.info("found: " + actual.getID() + " aka " + actual); + Assert.assertEquals(expected.getContentLength(), actual.bytesUsed); + + // update the artifact only + artifactDAO.delete(actualArtifact.getID()); + expected = new Artifact(expected.getURI(), expected.getMetaChecksum(), new Date(), 333L); + artifactDAO.put(expected); + actual = (DataNode)nodeDAO.get(orig.getID()); + Assert.assertNotEquals(expected.getContentLength(), actual.bytesUsed); + + // run the update + asWorker.run(); + actual = (DataNode)nodeDAO.get(orig.getID()); + Assert.assertEquals(expected.getContentLength(), actual.bytesUsed); + + } + +} diff --git a/cadc-inventory-db/src/intTest/java/org/opencadc/vospace/db/NodeDAOTest.java b/cadc-inventory-db/src/intTest/java/org/opencadc/vospace/db/NodeDAOTest.java new file mode 100644 index 000000000..4f4e52e9a --- /dev/null +++ b/cadc-inventory-db/src/intTest/java/org/opencadc/vospace/db/NodeDAOTest.java @@ -0,0 +1,780 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024. 
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.vospace.db; + +import ca.nrc.cadc.db.ConnectionConfig; +import ca.nrc.cadc.db.DBConfig; +import ca.nrc.cadc.db.DBUtil; +import ca.nrc.cadc.io.ResourceIterator; +import ca.nrc.cadc.util.Log4jInit; +import java.io.IOException; +import java.net.URI; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.sql.Connection; +import java.util.Map; +import java.util.TreeMap; +import java.util.UUID; +import javax.sql.DataSource; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.opencadc.gms.GroupURI; +import org.opencadc.inventory.db.SQLGenerator; +import org.opencadc.inventory.db.TestUtil; +import org.opencadc.vospace.ContainerNode; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.LinkNode; +import org.opencadc.vospace.Node; +import org.opencadc.vospace.NodeProperty; +import org.opencadc.vospace.VOS; + +/** + * + * @author pdowler + */ +public class NodeDAOTest { + private static final Logger log = Logger.getLogger(NodeDAOTest.class); + + static { + Log4jInit.setLevel("org.opencadc.inventory", Level.INFO); + Log4jInit.setLevel("org.opencadc.inventory.db", Level.INFO); + Log4jInit.setLevel("ca.nrc.cadc.db", Level.INFO); + Log4jInit.setLevel("org.opencadc.vospace", Level.INFO); + Log4jInit.setLevel("org.opencadc.vospace.db", Level.INFO); + } + + NodeDAO nodeDAO; + + public NodeDAOTest() throws Exception { + try { + DBConfig dbrc = new DBConfig(); + ConnectionConfig cc = dbrc.getConnectionConfig(TestUtil.SERVER, TestUtil.DATABASE); + DBUtil.PoolConfig pool = new DBUtil.PoolConfig(cc, 1, 6000L, "select 123"); + DBUtil.createJNDIDataSource("jdbc/NodeDAOTest", pool); + + Map config = new TreeMap<>(); + config.put(SQLGenerator.class.getName(), SQLGenerator.class); + config.put("jndiDataSourceName", "jdbc/NodeDAOTest"); + config.put("database", TestUtil.DATABASE); + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.VOS_SCHEMA); + config.put("vosSchema", TestUtil.VOS_SCHEMA); + + this.nodeDAO = new NodeDAO(); + nodeDAO.setConfig(config); + + } catch (Exception ex) { + log.error("setup failed", ex); + throw ex; + } + } + + @Before + public void init_cleanup() throws Exception { + log.info("init database..."); + InitDatabaseVOS init = new InitDatabaseVOS(nodeDAO.getDataSource(), TestUtil.DATABASE, TestUtil.VOS_SCHEMA); + init.doInit(); + log.info("init database... OK"); + + log.info("clearing old content..."); + SQLGenerator gen = nodeDAO.getSQLGenerator(); + DataSource ds = nodeDAO.getDataSource(); + String sql = "delete from " + gen.getTable(ContainerNode.class); + log.info("pre-test cleanup: " + sql); + Connection con = ds.getConnection(); + con.createStatement().execute(sql); + con.close(); + log.info("clearing old content... 
OK"); + } + + @Test + public void testGetByID_NotFound() { + UUID id = UUID.randomUUID(); + Node a = nodeDAO.get(id); + Assert.assertNull(a); + } + + @Test + public void testGetByPath_NotFound() { + ContainerNode parent = new ContainerNode("not-found"); + Node a = nodeDAO.get(parent, "not-found"); + Assert.assertNull(a); + + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + a = nodeDAO.get(root, "not-found"); + Assert.assertNull(a); + } + + @Test + public void testPutGetUpdateDeleteContainerNode() throws InterruptedException, + NoSuchAlgorithmException { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + // put + ContainerNode orig = new ContainerNode("container-test"); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + nodeDAO.put(orig); + + // get-by-id + Node a = nodeDAO.get(orig.getID()); + Assert.assertNotNull(a); + log.info("found by id: " + a.getID() + " aka " + a); + Assert.assertEquals(orig.getID(), a.getID()); + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(root.getID(), a.parentID); + + // get-by-path + Node aa = nodeDAO.get(root, orig.getName()); + Assert.assertNotNull(aa); + log.info("found by path: " + aa.getID() + " aka " + aa); + Assert.assertEquals(orig.getID(), aa.getID()); + Assert.assertEquals(orig.getName(), aa.getName()); + Assert.assertEquals(root.getID(), a.parentID); + Assert.assertNotNull(aa.parentID); + Assert.assertEquals(root.getID(), aa.parentID); + + Assert.assertNull(a.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(orig.ownerID, a.ownerID); + Assert.assertEquals(orig.isPublic, a.isPublic); + Assert.assertEquals(orig.isLocked, a.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), a.getReadOnlyGroup()); + Assert.assertEquals(orig.getReadWriteGroup(), a.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), a.getProperties()); + + Assert.assertTrue(a instanceof ContainerNode); + ContainerNode c = (ContainerNode) a; + Assert.assertEquals(orig.inheritPermissions, c.inheritPermissions); + Assert.assertEquals(orig.bytesUsed, c.bytesUsed); + + // these are set in put + Assert.assertEquals(orig.getMetaChecksum(), a.getMetaChecksum()); + Assert.assertEquals(orig.getLastModified(), a.getLastModified()); + + URI mcs = a.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("metaChecksum", a.getMetaChecksum(), mcs); + + // update + Thread.sleep(10L); + orig.getReadOnlyGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g1"))); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g3"))); + orig.isPublic = true; + orig.inheritPermissions = true; + nodeDAO.put(orig); + Node updated = nodeDAO.get(orig.getID()); + Assert.assertNotNull(updated); + Assert.assertEquals(orig.getID(), updated.getID()); + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertTrue(a.getLastModified().before(updated.getLastModified())); + Assert.assertNotEquals(a.getMetaChecksum(), updated.getMetaChecksum()); + + Assert.assertNull(updated.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertEquals(orig.ownerID, updated.ownerID); + Assert.assertEquals(orig.isPublic, updated.isPublic); + Assert.assertEquals(orig.isLocked, updated.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), updated.getReadOnlyGroup()); + 
Assert.assertEquals(orig.getReadWriteGroup(), updated.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), updated.getProperties()); + + Assert.assertTrue(updated instanceof ContainerNode); + ContainerNode uc = (ContainerNode) updated; + Assert.assertEquals(orig.inheritPermissions, uc.inheritPermissions); + Assert.assertEquals(orig.bytesUsed, uc.bytesUsed); + + nodeDAO.delete(orig.getID()); + Node gone = nodeDAO.get(orig.getID()); + Assert.assertNull(gone); + } + + @Test + public void testPutGetUpdateDeleteContainerNodeMax() throws InterruptedException, + NoSuchAlgorithmException { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + // TODO: use get-by-path to find and remove the test node + + ContainerNode orig = new ContainerNode("container-test"); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + orig.isPublic = true; + orig.isLocked = false; + orig.inheritPermissions = false; + orig.getReadOnlyGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g1"))); + orig.getReadOnlyGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g2"))); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g3"))); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g6-g7"))); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g6.g7"))); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g6_g7"))); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g6~g7"))); + orig.getProperties().add(new NodeProperty(URI.create("custom:prop"), "spaces in value")); + orig.getProperties().add(new NodeProperty(URI.create("sketchy:a,b"), "comma in uri")); + orig.getProperties().add(new NodeProperty(URI.create("sketchy:funny"), "value-with-{delims}")); + nodeDAO.put(orig); + + // get-by-id + Node a = nodeDAO.get(orig.getID()); + Assert.assertNotNull(a); + log.info("found by id: " + a.getID() + " aka " + a); + Assert.assertEquals(orig.getID(), a.getID()); + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertNotNull(a.parentID); + Assert.assertEquals(root.getID(), a.parentID); + + // get-by-path + Node aa = nodeDAO.get(root, orig.getName()); + Assert.assertNotNull(aa); + log.info("found by path: " + aa.getID() + " aka " + aa); + Assert.assertEquals(orig.getID(), aa.getID()); + Assert.assertEquals(orig.getName(), aa.getName()); + Assert.assertNotNull(aa.parentID); + Assert.assertEquals(root.getID(), aa.parentID); + + Assert.assertNull(a.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(orig.ownerID, a.ownerID); + Assert.assertEquals(orig.isPublic, a.isPublic); + Assert.assertEquals(orig.isLocked, a.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), a.getReadOnlyGroup()); + Assert.assertEquals(orig.getReadWriteGroup(), a.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), a.getProperties()); + + Assert.assertTrue(a instanceof ContainerNode); + ContainerNode c = (ContainerNode) a; + Assert.assertEquals(orig.inheritPermissions, c.inheritPermissions); + Assert.assertEquals(orig.bytesUsed, c.bytesUsed); + + // these are set in put + Assert.assertEquals(orig.getMetaChecksum(), a.getMetaChecksum()); + Assert.assertEquals(orig.getLastModified(), a.getLastModified()); + + URI mcs = a.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("metaChecksum", a.getMetaChecksum(), mcs); + + // 
update + Thread.sleep(10L); + orig.isPublic = false; + orig.isLocked = true; + orig.getReadOnlyGroup().clear(); + orig.getReadOnlyGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g1"))); + orig.getReadWriteGroup().clear(); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g3"))); + orig.getProperties().clear(); + orig.inheritPermissions = true; + nodeDAO.put(orig); + Node updated = nodeDAO.get(orig.getID()); + Assert.assertNotNull(updated); + Assert.assertEquals(orig.getID(), updated.getID()); + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertTrue(a.getLastModified().before(updated.getLastModified())); + Assert.assertNotEquals(a.getMetaChecksum(), updated.getMetaChecksum()); + + Assert.assertNull(updated.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertEquals(orig.ownerID, updated.ownerID); + Assert.assertEquals(orig.isPublic, updated.isPublic); + Assert.assertEquals(orig.isLocked, updated.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), updated.getReadOnlyGroup()); + Assert.assertEquals(orig.getReadWriteGroup(), updated.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), updated.getProperties()); + + Assert.assertTrue(updated instanceof ContainerNode); + ContainerNode uc = (ContainerNode) updated; + Assert.assertEquals(orig.inheritPermissions, uc.inheritPermissions); + Assert.assertEquals(orig.bytesUsed, uc.bytesUsed); + + nodeDAO.delete(orig.getID()); + Node gone = nodeDAO.get(orig.getID()); + Assert.assertNull(gone); + } + + @Test + public void testPutGetUpdateDeleteDataNode() throws InterruptedException, + NoSuchAlgorithmException { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + DataNode orig = new DataNode("data-test"); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + orig.storageID = URI.create("vault:" + UUID.randomUUID().toString()); + orig.isPublic = true; + orig.isLocked = false; + orig.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_TYPE, "text/plain")); + orig.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_DESCRIPTION, "this is the good stuff(tm)")); + nodeDAO.put(orig); + + // get-by-id + Node a = nodeDAO.get(orig.getID()); + Assert.assertNotNull(a); + log.info("found: " + a.getID() + " aka " + a); + Assert.assertEquals(orig.getID(), a.getID()); + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(root.getID(), a.parentID); + Assert.assertEquals(root.getID(), a.parentID); + + // get-by-path + Node aa = nodeDAO.get(root, orig.getName()); + Assert.assertNotNull(aa); + log.info("found: " + aa.getID() + " aka " + aa); + Assert.assertEquals(orig.getID(), aa.getID()); + Assert.assertEquals(orig.getName(), aa.getName()); + Assert.assertNotNull(aa.parentID); + Assert.assertEquals(root.getID(), aa.parentID); + + Assert.assertNull(a.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(orig.ownerID, a.ownerID); + Assert.assertEquals(orig.isPublic, a.isPublic); + Assert.assertEquals(orig.isLocked, a.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), a.getReadOnlyGroup()); + Assert.assertEquals(orig.getReadWriteGroup(), a.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), a.getProperties()); + + Assert.assertTrue(a instanceof DataNode); + DataNode dn = (DataNode) a; + Assert.assertEquals(orig.storageID, dn.storageID); + + // these are 
set in put + Assert.assertEquals(orig.getMetaChecksum(), a.getMetaChecksum()); + Assert.assertEquals(orig.getLastModified(), a.getLastModified()); + + URI mcs = a.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("metaChecksum", a.getMetaChecksum(), mcs); + + // update + Thread.sleep(10L); + orig.isPublic = false; + orig.isLocked = true; + orig.getReadOnlyGroup().clear(); + orig.getReadOnlyGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g1"))); + orig.getReadWriteGroup().clear(); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g3"))); + orig.getProperties().clear(); + // don't change storageID + nodeDAO.put(orig); + Node updated = nodeDAO.get(orig.getID()); + Assert.assertNotNull(updated); + Assert.assertEquals(orig.getID(), updated.getID()); + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertTrue(a.getLastModified().before(updated.getLastModified())); + Assert.assertNotEquals(a.getMetaChecksum(), updated.getMetaChecksum()); + + Assert.assertNull(updated.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertEquals(orig.ownerID, updated.ownerID); + Assert.assertEquals(orig.isPublic, updated.isPublic); + Assert.assertEquals(orig.isLocked, updated.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), updated.getReadOnlyGroup()); + Assert.assertEquals(orig.getReadWriteGroup(), updated.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), updated.getProperties()); + + + Assert.assertTrue(a instanceof DataNode); + DataNode udn = (DataNode) updated; + Assert.assertEquals(orig.bytesUsed, udn.bytesUsed); + Assert.assertEquals(orig.storageID, udn.storageID); + + nodeDAO.delete(orig.getID()); + Node gone = nodeDAO.get(orig.getID()); + Assert.assertNull(gone); + } + + @Test + public void testPutGetUpdateDeleteLinkNode() throws InterruptedException, + NoSuchAlgorithmException { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + // TODO: use get-by-path to find and remove the test node + + LinkNode orig = new LinkNode("data-test", URI.create("vos://opencadc.org~srv/path/to/something")); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + orig.isPublic = true; + orig.isLocked = false; + orig.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_DESCRIPTION, "link to the good stuff(tm)")); + nodeDAO.put(orig); + + // get-by-id + Node a = nodeDAO.get(orig.getID()); + Assert.assertNotNull(a); + log.info("found: " + a.getID() + " aka " + a); + Assert.assertEquals(orig.getID(), a.getID()); + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(root.getID(), a.parentID); + + // get-by-path + Node aa = nodeDAO.get(root, orig.getName()); + Assert.assertNotNull(aa); + log.info("found: " + aa.getID() + " aka " + aa); + Assert.assertEquals(orig.getID(), aa.getID()); + Assert.assertEquals(orig.getName(), aa.getName()); + Assert.assertNotNull(aa.parentID); + Assert.assertEquals(root.getID(), aa.parentID); + + Assert.assertNull(a.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(orig.ownerID, a.ownerID); + Assert.assertEquals(orig.isPublic, a.isPublic); + Assert.assertEquals(orig.isLocked, a.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), a.getReadOnlyGroup()); + Assert.assertEquals(orig.getReadWriteGroup(), a.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), 
a.getProperties()); + + Assert.assertTrue(a instanceof LinkNode); + LinkNode link = (LinkNode) a; + Assert.assertEquals(orig.getTarget(), link.getTarget()); + + // these are set in put + Assert.assertEquals(orig.getMetaChecksum(), a.getMetaChecksum()); + Assert.assertEquals(orig.getLastModified(), a.getLastModified()); + + URI mcs = a.computeMetaChecksum(MessageDigest.getInstance("MD5")); + Assert.assertEquals("metaChecksum", a.getMetaChecksum(), mcs); + + // update + Thread.sleep(10L); + orig.isPublic = false; + orig.isLocked = true; + orig.getReadOnlyGroup().clear(); + orig.getReadOnlyGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g1"))); + orig.getReadWriteGroup().clear(); + orig.getReadWriteGroup().add(new GroupURI(URI.create("ivo://opencadc.org/gms?g3"))); + orig.getProperties().clear(); + // don't change target + nodeDAO.put(orig); + Node updated = nodeDAO.get(orig.getID()); + Assert.assertNotNull(updated); + Assert.assertEquals(orig.getID(), updated.getID()); + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertTrue(a.getLastModified().before(updated.getLastModified())); + Assert.assertNotEquals(a.getMetaChecksum(), updated.getMetaChecksum()); + + Assert.assertNull(updated.parent); // get-node-by-id: comes pack without parent + Assert.assertEquals(orig.getName(), updated.getName()); + Assert.assertEquals(orig.ownerID, updated.ownerID); + Assert.assertEquals(orig.isPublic, updated.isPublic); + Assert.assertEquals(orig.isLocked, updated.isLocked); + Assert.assertEquals(orig.getReadOnlyGroup(), updated.getReadOnlyGroup()); + Assert.assertEquals(orig.getReadWriteGroup(), updated.getReadWriteGroup()); + Assert.assertEquals(orig.getProperties(), updated.getProperties()); + + Assert.assertTrue(updated instanceof LinkNode); + LinkNode ulink = (LinkNode) updated; + Assert.assertEquals(orig.getTarget(), ulink.getTarget()); + + nodeDAO.delete(orig.getID()); + Node gone = nodeDAO.get(orig.getID()); + Assert.assertNull(gone); + } + + @Test + public void testGetByStorageID() { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + DataNode notFound = nodeDAO.getDataNode(URI.create("vault:not-found")); + Assert.assertNull(notFound); + + DataNode orig = new DataNode("testGetByStorageID"); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + orig.storageID = URI.create("vault:" + UUID.randomUUID().toString()); + nodeDAO.put(orig); + + // get-by-storageID + DataNode gbs = nodeDAO.getDataNode(orig.storageID); + Assert.assertNotNull(gbs); + log.info("found: " + gbs.getID() + " aka " + gbs); + Assert.assertEquals(orig.getID(), gbs.getID()); + + nodeDAO.delete(orig.getID()); + } + + @Test + public void testGetWithLock() { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + // put + ContainerNode orig = new ContainerNode("container-test"); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + nodeDAO.put(orig); + + // get-by-id + Node a = nodeDAO.get(orig.getID()); + Assert.assertNotNull(a); + log.info("found by id: " + a.getID() + " aka " + a); + Assert.assertEquals(orig.getID(), a.getID()); + Assert.assertEquals(orig.getName(), a.getName()); + Assert.assertEquals(root.getID(), a.parentID); + + // get with lock + Node locked = nodeDAO.lock(a); + Assert.assertNotNull(locked); + log.info("locked: " + a.getID() + " aka " + a); + + nodeDAO.delete(orig.getID()); + Node gone = nodeDAO.get(orig.getID()); + Assert.assertNull(gone); + } + + @Test + public void 
testUpdateNodeSize() throws InterruptedException, + NoSuchAlgorithmException { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + final ContainerNode cnode = new ContainerNode("testUpdateNodeSize-container"); + cnode.parentID = root.getID(); + cnode.ownerID = "the-owner"; + nodeDAO.put(cnode); + + final DataNode dnode = new DataNode("testUpdateNodeSize-data"); + dnode.ownerID = "the-owner"; + dnode.storageID = URI.create("cadc:vault/" + UUID.randomUUID()); + dnode.parentID = cnode.getID(); + nodeDAO.put(dnode); + + final ContainerNode c1 = (ContainerNode) nodeDAO.get(cnode.getID()); + Assert.assertNotNull(c1); + log.info("found: " + c1.getID() + " aka " + c1); + Assert.assertEquals(cnode.getID(), c1.getID()); + Assert.assertEquals(cnode.getName(), c1.getName()); + Assert.assertEquals(root.getID(), c1.parentID); + Assert.assertNull(c1.bytesUsed); + + final DataNode d1 = (DataNode) nodeDAO.get(dnode.getID()); + Assert.assertNotNull(d1); + log.info("found: " + d1.getID() + " aka " + d1); + Assert.assertEquals(dnode.getID(), d1.getID()); + Assert.assertEquals(dnode.getName(), d1.getName()); + Assert.assertEquals(cnode.getID(), d1.parentID); + Assert.assertNull(d1.bytesUsed); + + final URI ccs = c1.getMetaChecksum(); + final URI dcs = d1.getMetaChecksum(); + + log.info("update DataNode"); + d1.bytesUsed = 123L; + nodeDAO.put(d1); + final DataNode d2 = (DataNode) nodeDAO.get(dnode.getID()); + Assert.assertNotNull(d2); + Assert.assertNotNull(d2.bytesUsed); + Assert.assertEquals(d1.bytesUsed, d2.bytesUsed); + + log.info("update ContainerNode.bytesUsed"); + c1.bytesUsed = 123L; + nodeDAO.put(c1); + final ContainerNode c2 = (ContainerNode) nodeDAO.get(cnode.getID()); + Assert.assertNotNull(c2); + Assert.assertNotNull(c2.bytesUsed); + Assert.assertEquals(123L, c2.bytesUsed.longValue()); + + nodeDAO.delete(dnode.getID()); + nodeDAO.delete(cnode.getID()); + } + + @Test + public void testContainerNodeIterator() throws IOException { + UUID rootID = new UUID(0L, 0L); + ContainerNode root = new ContainerNode(rootID, "root"); + + ContainerNode orig = new ContainerNode("container-test"); + orig.parentID = root.getID(); + orig.ownerID = "the-owner"; + nodeDAO.put(orig); + + Node a = nodeDAO.get(orig.getID()); + Assert.assertNotNull(a); + log.info("found: " + a.getID() + " aka " + a); + Assert.assertEquals(orig.getID(), a.getID()); + Assert.assertEquals(orig.getName(), a.getName()); + + Assert.assertTrue(a instanceof ContainerNode); + ContainerNode cn = (ContainerNode) a; + Assert.assertTrue(nodeDAO.isEmpty(cn)); + + // these are set in put + Assert.assertEquals(orig.getMetaChecksum(), a.getMetaChecksum()); + Assert.assertEquals(orig.getLastModified(), a.getLastModified()); + try (ResourceIterator emptyIter = nodeDAO.iterator(orig, null, null)) { + Assert.assertNotNull(emptyIter); + Assert.assertFalse(emptyIter.hasNext()); + } // auto-close + + Node top = null; + try (ResourceIterator rootIter = nodeDAO.iterator(root, null, null)) { + if (rootIter.hasNext()) { + top = rootIter.next(); + } + } + Assert.assertNotNull(top); + Assert.assertEquals(orig.getID(), top.getID()); + + // add children + ContainerNode cont = new ContainerNode("container1"); + cont.parentID = orig.getID(); + cont.ownerID = orig.ownerID; + DataNode data = new DataNode(UUID.randomUUID(), "data1", URI.create("cadc:vault/" + UUID.randomUUID())); + data.parentID = orig.getID(); + data.ownerID = orig.ownerID; + LinkNode link = new LinkNode("link1", URI.create("cadc:ARCHIVE/data")); + link.parentID = 
orig.getID(); + link.ownerID = orig.ownerID; + log.info("put child: " + cont + " of " + cont.parent); + nodeDAO.put(cont); + Assert.assertFalse(nodeDAO.isEmpty(cn)); + log.info("put child: " + data + " of " + data.parent); + nodeDAO.put(data); + Assert.assertFalse(nodeDAO.isEmpty(cn)); + log.info("put child: " + link + " of " + link.parent); + nodeDAO.put(link); + Assert.assertFalse(nodeDAO.isEmpty(cn)); + + Node c1; + Node c2; + Node c3; + try (ResourceIterator iter = nodeDAO.iterator(orig, null, null)) { + Assert.assertNotNull(iter); + Assert.assertTrue(iter.hasNext()); + c1 = iter.next(); + Assert.assertTrue(iter.hasNext()); + c2 = iter.next(); + Assert.assertTrue(iter.hasNext()); + c3 = iter.next(); + Assert.assertFalse(iter.hasNext()); + } + // default order: alpha + Assert.assertEquals(cont.getID(), c1.getID()); + Assert.assertEquals(cont.getName(), c1.getName()); + + Assert.assertEquals(data.getID(), c2.getID()); + Assert.assertEquals(data.getName(), c2.getName()); + + Assert.assertEquals(link.getID(), c3.getID()); + Assert.assertEquals(link.getName(), c3.getName()); + + // iterate with limit + try (ResourceIterator iter = nodeDAO.iterator(orig, 2, null)) { + Assert.assertNotNull(iter); + Assert.assertTrue(iter.hasNext()); + c1 = iter.next(); + Assert.assertTrue(iter.hasNext()); + c2 = iter.next(); + Assert.assertFalse(iter.hasNext()); + } + Assert.assertEquals(cont.getID(), c1.getID()); + Assert.assertEquals(cont.getName(), c1.getName()); + + Assert.assertEquals(data.getID(), c2.getID()); + Assert.assertEquals(data.getName(), c2.getName()); + + // iterate with start + try (ResourceIterator iter = nodeDAO.iterator(orig, null, c2.getName())) { + Assert.assertNotNull(iter); + Assert.assertTrue(iter.hasNext()); + c2 = iter.next(); + Assert.assertTrue(iter.hasNext()); + c3 = iter.next(); + Assert.assertFalse(iter.hasNext()); + } + Assert.assertEquals(data.getID(), c2.getID()); + Assert.assertEquals(data.getName(), c2.getName()); + + Assert.assertEquals(link.getID(), c3.getID()); + Assert.assertEquals(link.getName(), c3.getName()); + + // iterate with limit and start + try (ResourceIterator iter = nodeDAO.iterator(orig, 1, c2.getName())) { + Assert.assertNotNull(iter); + Assert.assertTrue(iter.hasNext()); + c2 = iter.next(); + Assert.assertFalse(iter.hasNext()); + } + Assert.assertEquals(data.getID(), c2.getID()); + Assert.assertEquals(data.getName(), c2.getName()); + + // depth first delete required but not enforced by DAO + nodeDAO.delete(cont.getID()); + nodeDAO.delete(data.getID()); + nodeDAO.delete(link.getID()); + nodeDAO.delete(orig.getID()); + Node gone = nodeDAO.get(orig.getID()); + Assert.assertNull(gone); + } +} diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/PreauthKeyPair.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/PreauthKeyPair.java new file mode 100644 index 000000000..3266918be --- /dev/null +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/PreauthKeyPair.java @@ -0,0 +1,139 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. 
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.inventory; + +import java.util.Objects; +import java.util.UUID; +import org.apache.log4j.Logger; + +/** + * Entity class to support storing a key pair in the database. 
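As a rough illustration of how a caller might construct this entity, the sketch below generates an RSA key pair with the standard java.security API and wraps the encoded key bytes; the key name, algorithm, and key size are placeholders, not requirements of this class.

```
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import org.opencadc.inventory.PreauthKeyPair;

public class PreauthKeyPairExample {
    public static PreauthKeyPair generate(String name) throws NoSuchAlgorithmException {
        // RSA/2048 is an illustrative choice; the entity only stores opaque key bytes
        KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
        kpg.initialize(2048);
        KeyPair kp = kpg.generateKeyPair();
        // encoded forms: X.509 (public) and PKCS#8 (private)
        return new PreauthKeyPair(name, kp.getPublic().getEncoded(), kp.getPrivate().getEncoded());
    }
}
```

Note that equality, hash code, and ordering for this class are based on the name only, so the name acts as the logical key for a stored key pair.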
+ * + * @author pdowler + */ +public class PreauthKeyPair extends Entity implements Comparable { + private static final Logger log = Logger.getLogger(PreauthKeyPair.class); + + private final String name; + private final byte[] publicKey; + private final byte[] privateKey; + + public PreauthKeyPair(String name, byte[] publicKey, byte[] privateKey) { + super(); + InventoryUtil.assertNotNull(PreauthKeyPair.class, "name", name); + InventoryUtil.assertNotNull(PreauthKeyPair.class, "publicKey", publicKey); + InventoryUtil.assertNotNull(PreauthKeyPair.class, "privateKey", privateKey); + this.name = name; + this.publicKey = publicKey; + this.privateKey = privateKey; + } + + // ctor for DAO class + public PreauthKeyPair(UUID id, String name, byte[] publicKey, byte[] privateKey) { + super(id); + InventoryUtil.assertNotNull(PreauthKeyPair.class, "name", name); + InventoryUtil.assertNotNull(PreauthKeyPair.class, "publicKey", publicKey); + InventoryUtil.assertNotNull(PreauthKeyPair.class, "privateKey", privateKey); + this.name = name; + this.publicKey = publicKey; + this.privateKey = privateKey; + } + + public String getName() { + return name; + } + + public byte[] getPublicKey() { + return publicKey; + } + + public byte[] getPrivateKey() { + return privateKey; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 43 * hash + Objects.hashCode(this.name); + return hash; + } + + @Override + public boolean equals(Object o) { + if (o == null) { + return false; + } + PreauthKeyPair f = (PreauthKeyPair) o; + return this.compareTo(f) == 0; + } + + @Override + public int compareTo(PreauthKeyPair t) { + return name.compareTo(t.name); + } +} diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/AbstractDAO.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/AbstractDAO.java index 7b823e304..6100383b0 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/AbstractDAO.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/AbstractDAO.java @@ -86,8 +86,8 @@ import javax.naming.NamingException; import javax.sql.DataSource; import org.apache.log4j.Logger; -import org.opencadc.inventory.Entity; import org.opencadc.inventory.InventoryUtil; +import org.opencadc.persist.Entity; import org.springframework.jdbc.BadSqlGrammarException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; @@ -168,7 +168,7 @@ public DataSource getDataSource() { return dataSource; } - SQLGenerator getSQLGenerator() { + public SQLGenerator getSQLGenerator() { checkInit(); return gen; } @@ -190,7 +190,9 @@ public Map getParams() { Map ret = new TreeMap(); ret.put("jndiDataSourceName", String.class); ret.put("database", String.class); - ret.put("schema", String.class); + ret.put("invSchema", String.class); + ret.put("genSchema", String.class); + ret.put("vosSchema", String.class); ret.put(SQLGenerator.class.getName(), Class.class); return ret; } @@ -223,10 +225,12 @@ public void setConfig(Map config) { } String database = (String) config.get("database"); - String schema = (String) config.get("schema"); + String invSchema = (String) config.get("invSchema"); + String genSchema = (String) config.get("genSchema"); + String vosSchema = (String) config.get("vosSchema"); try { - Constructor ctor = genClass.getConstructor(String.class, String.class); - this.gen = (SQLGenerator) ctor.newInstance(database, schema); + Constructor ctor = genClass.getConstructor(String.class, String.class, String.class, String.class); + this.gen = 
(SQLGenerator) ctor.newInstance(database, invSchema, genSchema, vosSchema); } catch (Exception ex) { throw new RuntimeException("failed to instantiate SQLGenerator: " + genClass.getName(), ex); } diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/ArtifactDAO.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/ArtifactDAO.java index 200b4e63c..cb7964ce8 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/ArtifactDAO.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/ArtifactDAO.java @@ -69,9 +69,11 @@ import ca.nrc.cadc.io.ResourceIterator; import java.net.URI; +import java.util.Date; import java.util.UUID; import org.apache.log4j.Logger; import org.opencadc.inventory.Artifact; +import org.opencadc.inventory.Namespace; import org.opencadc.inventory.SiteLocation; import org.opencadc.inventory.StorageLocation; import org.springframework.jdbc.BadSqlGrammarException; @@ -190,8 +192,8 @@ public ResourceIterator storedIterator(String storageBucketPrefix) { try { SQLGenerator.ArtifactIteratorQuery iter = (SQLGenerator.ArtifactIteratorQuery) gen.getEntityIteratorQuery(Artifact.class); iter.setStorageLocationRequired(true); + iter.setStorageBucket(storageBucketPrefix); iter.setOrderedOutput(true); - iter.setPrefix(storageBucketPrefix); return iter.query(dataSource); } catch (BadSqlGrammarException ex) { handleInternalFail(ex); @@ -218,8 +220,8 @@ public ResourceIterator unstoredIterator(String uriBucketPrefix) { try { SQLGenerator.ArtifactIteratorQuery iter = (SQLGenerator.ArtifactIteratorQuery) gen.getEntityIteratorQuery(Artifact.class); iter.setStorageLocationRequired(false); + iter.setUriBucket(uriBucketPrefix); iter.setOrderedOutput(true); - iter.setPrefix(uriBucketPrefix); return iter.query(dataSource); } catch (BadSqlGrammarException ex) { handleInternalFail(ex); @@ -240,7 +242,7 @@ public ResourceIterator unstoredIterator(String uriBucketPrefix) { * @return iterator over artifacts */ public ResourceIterator iterator(String uriBucketPrefix, boolean ordered) { - return iterator((String) null, uriBucketPrefix, ordered); + return iterator((Namespace) null, uriBucketPrefix, ordered); } /** @@ -250,19 +252,37 @@ public ResourceIterator iterator(String uriBucketPrefix, boolean order * *

Use case: local cleanup by arbitrary criteria * - * @param criteria conditions for selecting artifacts + * @param ns namespace for selecting artifacts * @param uriBucketPrefix null, prefix, or complete Artifact.uriBucket string * @param ordered order by Artifact.uri (true) or not ordered (false) * @return iterator over artifacts matching criteria */ - public ResourceIterator iterator(String criteria, String uriBucketPrefix, boolean ordered) { + public ResourceIterator iterator(Namespace ns, String uriBucketPrefix, boolean ordered) { + return iterator(ns, uriBucketPrefix, null, ordered); + } + + /** + * Iterate over artifacts that match criteria. This method adds an optional Date argument to + * support incremental processing; when minLastModified is provided, ordered output is in timestamp + * order rather than uri order. + * + *
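A minimal sketch of driving this incremental overload, assuming the caller keeps its own checkpoint; the Namespace value, checkpoint handling, and processing step are illustrative only.

```
import ca.nrc.cadc.io.ResourceIterator;
import java.io.IOException;
import java.util.Date;
import org.opencadc.inventory.Artifact;
import org.opencadc.inventory.Namespace;
import org.opencadc.inventory.db.ArtifactDAO;

public class IncrementalIteratorExample {
    // process artifacts in a namespace that changed since the last run and return the new checkpoint
    public static Date process(ArtifactDAO dao, Date lastSeen) throws IOException {
        Namespace ns = new Namespace("cadc:TEST/"); // placeholder namespace
        Date maxSeen = lastSeen;
        try (ResourceIterator<Artifact> iter = dao.iterator(ns, null, lastSeen, true)) {
            while (iter.hasNext()) {
                Artifact a = iter.next();
                // ... handle the artifact event here ...
                maxSeen = a.getLastModified(); // rows arrive in lastModified order in this mode
            }
        }
        return maxSeen;
    }
}
```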

Use case: process artifact events directly in the database + * + * @param ns namespace for selecting artifacts + * @param uriBucketPrefix null, prefix, or complete Artifact.uriBucket string + * @param minLastModified minimum Artifact.lastModified to consider (incremental mode) + * @param ordered order by Artifact.uri (true) or not ordered (false) + * @return iterator over artifacts matching criteria + */ + public ResourceIterator iterator(Namespace ns, String uriBucketPrefix, Date minLastModified, boolean ordered) { checkInit(); long t = System.currentTimeMillis(); try { SQLGenerator.ArtifactIteratorQuery iter = (SQLGenerator.ArtifactIteratorQuery) gen.getEntityIteratorQuery(Artifact.class); - iter.setPrefix(uriBucketPrefix); - iter.setCriteria(criteria); + iter.setUriBucket(uriBucketPrefix); + iter.setNamespace(ns); + iter.setMinLastModified(minLastModified); iter.setOrderedOutput(ordered); return iter.query(dataSource); } catch (BadSqlGrammarException ex) { @@ -273,7 +293,7 @@ public ResourceIterator iterator(String criteria, String uriBucketPref } throw new RuntimeException("BUG: should be unreachable"); } - + /** * Iterate over Artifacts from a specific site. If a siteID is specified, only artifacts where * artifact.siteLocations includes that siteID are returned; this is only applicable in a global @@ -292,7 +312,7 @@ public ResourceIterator iterator(UUID siteID, String uriBucketPrefix, try { SQLGenerator.ArtifactIteratorQuery iter = (SQLGenerator.ArtifactIteratorQuery) gen.getEntityIteratorQuery(Artifact.class); - iter.setPrefix(uriBucketPrefix); + iter.setUriBucket(uriBucketPrefix); iter.setSiteID(siteID); iter.setOrderedOutput(ordered); return iter.query(dataSource); diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityGet.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityGet.java index c065b58a6..029ca037b 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityGet.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityGet.java @@ -68,7 +68,7 @@ package org.opencadc.inventory.db; import java.util.UUID; -import org.opencadc.inventory.Entity; +import org.opencadc.persist.Entity; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.PreparedStatementCreator; diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityIteratorQuery.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityIteratorQuery.java index 782de3129..04e065bb8 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityIteratorQuery.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityIteratorQuery.java @@ -67,7 +67,7 @@ package org.opencadc.inventory.db; -import java.util.Iterator; +import ca.nrc.cadc.io.ResourceIterator; import javax.sql.DataSource; /** @@ -76,5 +76,5 @@ * @param entity subclass */ public interface EntityIteratorQuery { - Iterator query(DataSource ds); + ResourceIterator query(DataSource ds); } diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityLock.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityLock.java index 238518d78..d99e41742 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityLock.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityLock.java @@ -68,7 +68,7 @@ package org.opencadc.inventory.db; import java.util.UUID; -import org.opencadc.inventory.Entity; +import org.opencadc.persist.Entity; import 
org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.PreparedStatementCreator; diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityPut.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityPut.java index 383347eba..e236c7a2b 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityPut.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/EntityPut.java @@ -69,7 +69,7 @@ package org.opencadc.inventory.db; -import org.opencadc.inventory.Entity; +import org.opencadc.persist.Entity; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.PreparedStatementCreator; diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestState.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestState.java index 210030c66..4d012cc24 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestState.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestState.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2020. (c) 2020. +* (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -70,7 +70,6 @@ import java.net.URI; import java.util.Date; import java.util.UUID; - import org.apache.log4j.Logger; import org.opencadc.inventory.Entity; import org.opencadc.inventory.InventoryUtil; @@ -95,6 +94,12 @@ public class HarvestState extends Entity { */ public UUID curID; + /** + * The ID of the current running instance. This is optional and only used by applications + * that share workload between instances. 
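For example, a worker might record its identity in the shared state roughly as sketched below; the name, resourceID, and claim pattern are illustrative, and only the instanceID field plus the forced put shown later in this change are taken from the code itself.

```
import java.net.URI;
import java.util.UUID;
import org.opencadc.inventory.db.HarvestState;
import org.opencadc.inventory.db.HarvestStateDAO;

public class InstanceClaimExample {
    public static void claim(HarvestStateDAO dao, UUID myInstanceID) {
        HarvestState state = dao.get("Artifact", URI.create("ivo://example.org/minoc"));
        if (state == null) {
            state = new HarvestState("Artifact", URI.create("ivo://example.org/minoc"));
        }
        // record which running instance currently holds this workload
        state.instanceID = myInstanceID;
        // force the update so the claim is written immediately rather than buffered
        // (see HarvestStateDAO.put(HarvestState, boolean))
        dao.put(state, true);
    }
}
```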
+ */ + public UUID instanceID; + public HarvestState(String name, URI resourceID) { super(); InventoryUtil.assertNotNull(HarvestState.class, "name", name); @@ -117,6 +122,7 @@ public String getName() { } public void setName(String name) { + InventoryUtil.assertNotNull(HarvestState.class, "name", name); this.name = name; } @@ -131,6 +137,9 @@ public void setResourceID(URI resourceID) { @Override public String toString() { + if (instanceID != null) { + return HarvestState.class.getSimpleName() + "[" + instanceID + "," + name + "," + resourceID + "]"; + } return HarvestState.class.getSimpleName() + "[" + name + "," + resourceID + "]"; } } diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestStateDAO.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestStateDAO.java index d7dd81e2b..a0f8d8f74 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestStateDAO.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/HarvestStateDAO.java @@ -68,6 +68,7 @@ package org.opencadc.inventory.db; import java.net.URI; +import java.sql.Connection; import java.sql.SQLException; import java.util.UUID; import org.apache.log4j.Logger; @@ -176,12 +177,16 @@ public HarvestState get(String name, URI resourceID) { @Override public void put(HarvestState val) { - if (curBufferCount < updateBufferCount) { + put(val, false); + } + + public void put(HarvestState val, boolean forceTimestampUpdate) { + if (curBufferCount < updateBufferCount && !forceTimestampUpdate) { log.debug("buffering: " + curBufferCount + " < " + updateBufferCount + " " + val); curBufferCount++; bufferedState = val; } else { - super.put(val); + super.put(val, false, forceTimestampUpdate); curBufferCount = 0; bufferedState = null; @@ -190,13 +195,15 @@ public void put(HarvestState val) { if (curMaintCount == maintCount) { String sql = "VACUUM " + gen.getTable(HarvestState.class); log.warn("maintenance: " + curMaintCount + "==" + maintCount + " " + sql); - //JdbcTemplate jdbc = new JdbcTemplate(dataSource); - //jdbc.execute(sql); try { - dataSource.getConnection().createStatement().execute(sql); - } catch (SQLException ex) { - log.error("ERROR: " + sql + " FAILED", ex); - // yes, log and proceed + try (Connection c = dataSource.getConnection()) { + c.createStatement().execute(sql); + } catch (SQLException ex) { + log.error("maintenance failed: " + sql, ex); + // yes, log and proceed + } // auto-close to return to pool + } catch (Exception ex) { + log.error("failed to close connection after maintenance: " + sql, ex); } curMaintCount = 0; } else { diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/PreauthKeyPairDAO.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/PreauthKeyPairDAO.java new file mode 100644 index 000000000..aa8085594 --- /dev/null +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/PreauthKeyPairDAO.java @@ -0,0 +1,144 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2022. (c) 2022. 
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.inventory.db; + +import java.util.List; +import java.util.Set; +import java.util.UUID; +import org.apache.log4j.Logger; +import org.opencadc.inventory.PreauthKeyPair; +import org.springframework.jdbc.BadSqlGrammarException; +import org.springframework.jdbc.core.JdbcTemplate; + +/** + * Simple DAO class to store and retrieve public-private key pairs. 
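A brief usage sketch, assuming the DAO is wired from an already-configured DAO via the copy constructor below; the key name is a placeholder and only methods visible in this class are called.

```
import java.util.Set;
import org.opencadc.inventory.PreauthKeyPair;
import org.opencadc.inventory.db.ArtifactDAO;
import org.opencadc.inventory.db.PreauthKeyPairDAO;

public class KeyPairLookupExample {
    public static byte[] getPublicKey(ArtifactDAO configuredDAO, String name) {
        // reuse the configuration of an existing DAO
        PreauthKeyPairDAO keyDAO = new PreauthKeyPairDAO(configuredDAO);

        PreauthKeyPair kp = keyDAO.get(name); // lookup by unique name
        if (kp == null) {
            // nothing stored under that name; list() returns all stored key pairs
            Set<PreauthKeyPair> all = keyDAO.list();
            throw new IllegalStateException("no key pair '" + name + "'; " + all.size() + " key pair(s) available");
        }
        return kp.getPublicKey();
    }
}
```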
+ * + * @author pdowler + */ +public class PreauthKeyPairDAO extends AbstractDAO { + private static final Logger log = Logger.getLogger(PreauthKeyPairDAO.class); + + public PreauthKeyPairDAO() { + super(true); + } + + public PreauthKeyPairDAO(AbstractDAO src) { + super(src); + } + + public PreauthKeyPair get(UUID id) { + return super.get(PreauthKeyPair.class, id); + } + + public PreauthKeyPair get(String name) { + if (name == null) { + throw new IllegalArgumentException("name cannot be null"); + } + checkInit(); + log.debug("GET: " + name); + long t = System.currentTimeMillis(); + + try { + JdbcTemplate jdbc = new JdbcTemplate(dataSource); + + SQLGenerator.KeyPairGet get = ( SQLGenerator.KeyPairGet) gen.getEntityGet(PreauthKeyPair.class); + get.setName(name); + PreauthKeyPair ret = get.execute(jdbc); + return ret; + } catch (BadSqlGrammarException ex) { + handleInternalFail(ex); + } finally { + long dt = System.currentTimeMillis() - t; + log.debug("GET: " + name + " " + dt + "ms"); + } + throw new RuntimeException("BUG: should be unreachable"); + } + + public void delete(UUID id) { + super.delete(PreauthKeyPair.class, id); + } + + public Set list() { + checkInit(); + log.debug("LIST"); + long t = System.currentTimeMillis(); + + try { + JdbcTemplate jdbc = new JdbcTemplate(dataSource); + EntityList get = gen.getEntityList(PreauthKeyPair.class); + Set result = get.query(jdbc); + return result; + } catch (BadSqlGrammarException ex) { + handleInternalFail(ex); + } finally { + long dt = System.currentTimeMillis() - t; + log.debug("LIST: " + dt + "ms"); + } + throw new RuntimeException("BUG: should be unreachable"); + } +} diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/SQLGenerator.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/SQLGenerator.java index fb5da9fec..0652a9184 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/SQLGenerator.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/SQLGenerator.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2022. (c) 2022. +* (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -89,16 +89,25 @@ import java.util.UUID; import javax.sql.DataSource; import org.apache.log4j.Logger; +import org.opencadc.gms.GroupURI; import org.opencadc.inventory.Artifact; import org.opencadc.inventory.DeletedArtifactEvent; import org.opencadc.inventory.DeletedStorageLocationEvent; -import org.opencadc.inventory.Entity; import org.opencadc.inventory.InventoryUtil; +import org.opencadc.inventory.Namespace; import org.opencadc.inventory.ObsoleteStorageLocation; +import org.opencadc.inventory.PreauthKeyPair; import org.opencadc.inventory.SiteLocation; import org.opencadc.inventory.StorageLocation; import org.opencadc.inventory.StorageLocationEvent; import org.opencadc.inventory.StorageSite; +import org.opencadc.persist.Entity; +import org.opencadc.vospace.ContainerNode; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.DeletedNodeEvent; +import org.opencadc.vospace.LinkNode; +import org.opencadc.vospace.Node; +import org.opencadc.vospace.NodeProperty; import org.springframework.dao.DataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.ResultSetExtractor; @@ -111,43 +120,57 @@ public class SQLGenerator { private static final Logger log = Logger.getLogger(SQLGenerator.class); - private final Map tableMap = new TreeMap(new ClassComp()); - private final Map columnMap = new TreeMap(new ClassComp()); + private final Map tableMap = new TreeMap<>(new ClassComp()); + private final Map columnMap = new TreeMap<>(new ClassComp()); protected final String database; // currently not used in SQL - protected final String schema; // may be null + protected final String invSchema; + protected final String genSchema; + protected final String vosSchema; /** * Constructor. The database name is currently not used in any generated SQL; code assumes * that the DataSource is connected to the right database already and cross-database statements - * are not supported. The schema name is used to qualify table names (if set). The optional table - * name prefix is pre-pended to the table name (after the optional {schema}.) and is provided as - * a work around in shared (usually test) databases where schema name is a username and table - * names may collide with other content. Normal "production" use would specify the schema only. + * are not supported. * - * @param database database name (may be null) - * @param schema schema name (may be null) + * @param database database name: not used; future-proof + * @param invSchema inventory schema name (required, implies genSchema = invSchema) + * @param genSchema generic schema name for internal tables (PreauthKeys, HarvestState) - optional */ - public SQLGenerator(String database, String schema) { + public SQLGenerator(String database, String invSchema, String genSchema) { + this(database, invSchema, genSchema, null); + } + + /** + * Constructor. The database name is currently not used in any generated SQL; code assumes + * that the DataSource is connected to the right database already and cross-database statements + * are not supported. The genSchema is optional (required for HarvestState and PreauthKeyPair). + * The vosSchema is optional (used for Node and DeletedNodeEvent). 
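For orientation, a configuration sketch that feeds these schema names through AbstractDAO.setConfig; the key names follow the updated AbstractDAO.getParams in this change, while the JNDI name and schema values are placeholders.

```
import java.util.Map;
import java.util.TreeMap;
import org.opencadc.inventory.db.ArtifactDAO;
import org.opencadc.inventory.db.SQLGenerator;

public class DaoConfigExample {
    public static ArtifactDAO createDAO() {
        Map<String, Object> config = new TreeMap<>();
        config.put("jndiDataSourceName", "jdbc/inventory"); // placeholder JNDI name
        config.put("database", "content");                  // currently unused in generated SQL
        config.put("invSchema", "inventory");               // inventory tables - required
        config.put("genSchema", "inventory");               // internal tables (HarvestState, PreauthKeyPair)
        config.put("vosSchema", "vospace");                 // only needed for Node/DeletedNodeEvent support
        config.put(SQLGenerator.class.getName(), SQLGenerator.class);

        ArtifactDAO dao = new ArtifactDAO();
        dao.setConfig(config);
        return dao;
    }
}
```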
+ * + * @param database database name: not used; future-proof + * @param invSchema inventory schema name - required + * @param genSchema generic schema name - optional + * @param vosSchema vospace schema name - optional + */ + public SQLGenerator(String database, String invSchema, String genSchema, String vosSchema) { this.database = database; - this.schema = schema; + InventoryUtil.assertNotNull(SQLGenerator.class, "invSchema", invSchema); // required for all uses + this.invSchema = invSchema; + InventoryUtil.assertNotNull(SQLGenerator.class, "genSchema", genSchema); // required for correct init + this.genSchema = genSchema; + this.vosSchema = vosSchema; // only required for vospace init(); } protected void init() { - String pref = ""; - if (schema != null) { - pref = schema + "."; - } // inventory model - this.tableMap.put(Artifact.class, pref + Artifact.class.getSimpleName()); - this.tableMap.put(StorageSite.class, pref + StorageSite.class.getSimpleName()); - this.tableMap.put(DeletedArtifactEvent.class, pref + DeletedArtifactEvent.class.getSimpleName()); - this.tableMap.put(DeletedStorageLocationEvent.class, pref + DeletedStorageLocationEvent.class.getSimpleName()); - this.tableMap.put(StorageLocationEvent.class, pref + StorageLocationEvent.class.getSimpleName()); + this.tableMap.put(Artifact.class, invSchema + "." + Artifact.class.getSimpleName()); + this.tableMap.put(StorageSite.class, invSchema + "." + StorageSite.class.getSimpleName()); + this.tableMap.put(DeletedArtifactEvent.class, invSchema + "." + DeletedArtifactEvent.class.getSimpleName()); + this.tableMap.put(DeletedStorageLocationEvent.class, invSchema + "." + DeletedStorageLocationEvent.class.getSimpleName()); + this.tableMap.put(StorageLocationEvent.class, invSchema + "." + StorageLocationEvent.class.getSimpleName()); // internal - this.tableMap.put(ObsoleteStorageLocation.class, pref + ObsoleteStorageLocation.class.getSimpleName()); - this.tableMap.put(HarvestState.class, pref + HarvestState.class.getSimpleName()); + this.tableMap.put(ObsoleteStorageLocation.class, invSchema + "." + ObsoleteStorageLocation.class.getSimpleName()); String[] cols = new String[] { "uri", // first column is logical key @@ -193,16 +216,71 @@ protected void init() { }; this.columnMap.put(ObsoleteStorageLocation.class, cols); - cols = new String[] { - "name", - "resourceID", - "curLastModified", - "curID", - "lastModified", - "metaChecksum", - "id" // last column is always PK - }; - this.columnMap.put(HarvestState.class, cols); + log.debug("genSchema: " + genSchema); + if (genSchema != null) { + // generic support + this.tableMap.put(HarvestState.class, genSchema + "." + HarvestState.class.getSimpleName()); + this.tableMap.put(PreauthKeyPair.class, genSchema + "." + PreauthKeyPair.class.getSimpleName()); + cols = new String[] { + "name", + "resourceID", + "curLastModified", + "curID", + "instanceID", + "lastModified", + "metaChecksum", + "id" // last column is always PK + }; + this.columnMap.put(HarvestState.class, cols); + + cols = new String[] { + "name", + "publicKey", + "privateKey", + "lastModified", + "metaChecksum", + "id" // last column is always PK + }; + this.columnMap.put(PreauthKeyPair.class, cols); + } + + // optional vospace + log.debug("vosSchema: " + vosSchema); + if (vosSchema != null) { + tableMap.put(Node.class, vosSchema + "." + Node.class.getSimpleName()); + tableMap.put(DeletedNodeEvent.class, vosSchema + "." 
+ DeletedNodeEvent.class.getSimpleName()); + + cols = new String[] { + "parentID", + "name", + "nodeType", + "ownerID", + "isPublic", + "isLocked", + "readOnlyGroups", + "readWriteGroups", + "properties", + "inheritPermissions", + "bytesUsed", + "busy", + "storageID", + "storageBucket", + "target", + "lastModified", + "metaChecksum", + "id" // last column is always PK + }; + this.columnMap.put(Node.class, cols); + + cols = new String[] { + "nodeType", + "storageID", + "lastModified", + "metaChecksum", + "id" // last column is always PK + }; + this.columnMap.put(DeletedNodeEvent.class, cols); + } } private static class ClassComp implements Comparator { @@ -218,9 +296,28 @@ public String getCurrentTimeSQL() { return "SELECT now()"; } - // test usage - String getTable(Class c) { - return tableMap.get(c); + public String getTable(Class c) { + Class targetClass = c; + String ret = tableMap.get(targetClass); + if (ret == null) { + // enable finding a common table that stores subclass instances + targetClass = targetClass.getSuperclass(); + ret = tableMap.get(targetClass); + } + log.debug("table: " + c.getSimpleName() + " -> " + targetClass.getSimpleName() + " -> " + ret); + return ret; + } + + private String[] getColumns(Class c) { + Class targetClass = c; + String[] ret = columnMap.get(targetClass); + if (ret == null) { + // enable finding a common table that stores subclass instances + targetClass = targetClass.getSuperclass(); + ret = columnMap.get(targetClass); + } + log.debug("columns: " + c.getSimpleName() + " -> " + targetClass.getSimpleName() + " -> " + (ret == null ? null : ret.length)); + return ret; } public EntityGet getEntityGet(Class c) { @@ -234,11 +331,18 @@ public EntityGet getEntityGet(Class c, boolean forUpdate) { if (StorageSite.class.equals(c)) { return new StorageSiteGet(forUpdate); } + if (PreauthKeyPair.class.equals(c)) { + return new KeyPairGet(forUpdate); + } + if (Node.class.equals(c)) { + return new NodeGet(forUpdate); + } if (forUpdate) { throw new UnsupportedOperationException("entity-get + forUpdate: " + c.getSimpleName()); } + // raw events are never locked for update if (DeletedArtifactEvent.class.equals(c)) { return new DeletedArtifactEventGet(); } @@ -251,29 +355,58 @@ public EntityGet getEntityGet(Class c, boolean forUpdate) { if (ObsoleteStorageLocation.class.equals(c)) { return new ObsoleteStorageLocationGet(); } + + if (DeletedNodeEvent.class.equals(c)) { + //return new DeletedNodeGet(); + } + if (HarvestState.class.equals(c)) { return new HarvestStateGet(); } + + + throw new UnsupportedOperationException("entity-get: " + c.getName()); } + public NodeCount getNodeCount() { + return new NodeCount(); + } + + public class NodeCount { + private UUID id; + + public void setID(UUID id) { + this.id = id; + } + + public int execute(JdbcTemplate jdbc) { + StringBuilder sb = new StringBuilder(); + sb.append("SELECT count(*) FROM ").append(getTable(Node.class)); + sb.append(" WHERE parentID = '").append(id.toString()).append("'"); + String sql = sb.toString(); + log.debug("NodeCount: " + sql); + int ret = jdbc.queryForObject(sql, Integer.class); + return ret; + } + } + public EntityIteratorQuery getEntityIteratorQuery(Class c) { if (Artifact.class.equals(c)) { return new ArtifactIteratorQuery(); } - throw new UnsupportedOperationException("entity-list: " + c.getName()); + if (Node.class.equals(c)) { + return new NodeIteratorQuery(); + } + throw new UnsupportedOperationException("entity-iterator: " + c.getName()); } public EntityList getEntityList(Class c) { if 
(StorageSite.class.equals(c)) { return new StorageSiteList(); } - throw new UnsupportedOperationException("entity-list: " + c.getName()); - } - - public EntityLock getEntityLock(Class c) { - if (Artifact.class.equals(c)) { - return new EntityLockImpl(c); + if (PreauthKeyPair.class.equals(c)) { + return new KeyPairList(); } throw new UnsupportedOperationException("entity-list: " + c.getName()); } @@ -305,6 +438,15 @@ public EntityPut getEntityPut(Class c, boolean update) { if (HarvestState.class.equals(c)) { return new HarvestStatePut(update); } + if (PreauthKeyPair.class.equals(c)) { + return new KeyPairPut(update); + } + if (Node.class.isAssignableFrom(c)) { + return new NodePut(update); + } + if (DeletedNodeEvent.class.equals(c)) { + //return new DeletedNodePut(update); + } throw new UnsupportedOperationException("entity-put: " + c.getName()); } @@ -312,41 +454,6 @@ public EntityDelete getEntityDelete(Class c) { return new EntityDeleteImpl(c); } - private class EntityLockImpl implements EntityLock { - private final Calendar utc = Calendar.getInstance(DateUtil.UTC); - private final Class entityClass; - private UUID id; - - EntityLockImpl(Class entityClass) { - this.entityClass = entityClass; - } - - @Override - public void setID(UUID id) { - this.id = id; - } - - @Override - public void execute(JdbcTemplate jdbc) throws EntityNotFoundException { - int n = jdbc.update(this); - if (n == 0) { - throw new EntityNotFoundException("not found: " + id); - } - } - - @Override - public PreparedStatement createPreparedStatement(Connection conn) throws SQLException { - String sql = getLockSQL(entityClass); - log.debug("EntityLockImpl: " + sql); - PreparedStatement prep = conn.prepareStatement(sql); - int col = 1; - prep.setObject(col++, id); - prep.setObject(col++, id); - - return prep; - } - } - private class SkeletonGet implements EntityGet { private UUID id; private final Class entityClass; @@ -428,7 +535,7 @@ public void setLocation(StorageLocation loc) { @Override public ObsoleteStorageLocation execute(JdbcTemplate jdbc) { - return (ObsoleteStorageLocation) jdbc.query(this, new ObsoleteStorageLocationExtractor()); + return jdbc.query(this, new ObsoleteStorageLocationExtractor()); } @Override @@ -443,7 +550,7 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce String col = getKeyColumn(ObsoleteStorageLocation.class, true); sb.append(col).append(" = ?"); } else { - String[] cols = columnMap.get(ObsoleteStorageLocation.class); + String[] cols = getColumns(ObsoleteStorageLocation.class); sb.append(cols[0]).append(" = ?"); sb.append(" AND "); if (loc.storageBucket != null) { @@ -500,7 +607,7 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce String col = getKeyColumn(HarvestState.class, true); sb.append(col).append(" = ?"); } else { - String[] cols = columnMap.get(HarvestState.class); + String[] cols = getColumns(HarvestState.class); sb.append(cols[0]).append(" = ?"); sb.append(" AND "); sb.append(cols[1]).append(" = ?"); @@ -571,10 +678,15 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce class ArtifactIteratorQuery implements EntityIteratorQuery { private Boolean storageLocationRequired; - private String prefix; + private String storageBucket; + private UUID siteID; - private String whereClause; + private String uriBucket; + private Namespace namespace; + private Date minLastModified; private boolean ordered; + + private final Calendar utc = Calendar.getInstance(DateUtil.UTC); public 
ArtifactIteratorQuery() { } @@ -590,22 +702,30 @@ public void setStorageLocationRequired(Boolean slr) { this.storageLocationRequired = slr; } - public void setPrefix(String prefix) { + public void setStorageBucket(String prefix) { if (StringUtil.hasText(prefix)) { - this.prefix = prefix.trim(); + this.storageBucket = prefix.trim(); } else { - this.prefix = null; + this.storageBucket = null; } } - public void setCriteria(String whereClause) { - if (StringUtil.hasText(whereClause)) { - this.whereClause = whereClause.trim(); + public void setUriBucket(String uriBucket) { + if (StringUtil.hasText(uriBucket)) { + this.uriBucket = uriBucket.trim(); } else { - this.whereClause = null; + uriBucket = null; } } + public void setNamespace(Namespace namespace) { + this.namespace = namespace; + } + + public void setMinLastModified(Date minLastModified) { + this.minLastModified = minLastModified; + } + public void setOrderedOutput(boolean ordered) { this.ordered = ordered; } @@ -616,13 +736,14 @@ public void setSiteID(UUID siteID) { @Override public ResourceIterator query(DataSource ds) { - - StringBuilder sb = getSelectFromSQL(Artifact.class, false); - sb.append(" WHERE"); - + StringBuilder sb = new StringBuilder(); + boolean where = false; if (storageLocationRequired != null && storageLocationRequired) { // ArtifactDAO.storedIterator - if (StringUtil.hasText(prefix)) { + sb.append(" WHERE"); + where = true; + if (storageBucket != null) { + sb.append(" storageLocation_storageBucket LIKE ? AND"); } sb.append(" storageLocation_storageID IS NOT NULL"); @@ -636,42 +757,64 @@ public ResourceIterator query(DataSource ds) { } } else if (storageLocationRequired != null && !storageLocationRequired) { // ArtifactDAO.unstoredIterator - if (StringUtil.hasText(prefix)) { - sb.append(" uriBucket LIKE ? AND"); - } + sb.append(" WHERE"); + where = true; sb.append(" storageLocation_storageID IS NULL"); - if (ordered) { - sb.append(" ORDER BY uri"); - } - } else if (siteID != null) { - if (prefix != null && siteID != null) { - sb.append(" uriBucket LIKE ? AND ").append("siteLocations @> ARRAY[?]"); + } + + // optional params: + // uriBucket + // siteID + // namespace + // minLastModified + if (uriBucket != null) { + if (where) { + sb.append(" AND"); } else { - sb.append(" siteLocations @> ARRAY[?]"); + sb.append(" WHERE"); + where = true; } - if (ordered) { - sb.append(" ORDER BY uri"); + sb.append(" uriBucket LIKE ?"); + } + if (siteID != null) { + // ArtifactDAO.iterator(UUID, ...) + if (where) { + sb.append(" AND"); + } else { + sb.append(" WHERE"); + where = true; } - } else if (whereClause != null) { - if (prefix != null && whereClause != null) { - sb.append(" uriBucket LIKE ? 
AND ( ").append(whereClause).append(" )"); + sb.append(" siteLocations @> ARRAY[?]"); + } + if (namespace != null) { + if (where) { + sb.append(" AND"); } else { - sb.append(" (").append(whereClause).append(" )"); + sb.append(" WHERE"); + where = true; } - if (ordered) { - sb.append(" ORDER BY uri"); + sb.append(" uri LIKE ?"); + } + if (minLastModified != null) { + if (where) { + sb.append(" AND"); + } else { + sb.append(" WHERE"); + where = true; } - } else if (prefix != null) { - sb.append(" uriBucket LIKE ?"); - if (ordered) { + sb.append(" lastModified >= ?"); + } + if (ordered && !(storageLocationRequired != null && storageLocationRequired)) { + if (minLastModified != null) { + sb.append(" ORDER BY lastModified ASC"); + } else { sb.append(" ORDER BY uri"); } - } else { - // trim off " WHERE" - sb.delete(sb.length() - 6, sb.length()); } - String sql = sb.toString(); + StringBuilder select = getSelectFromSQL(Artifact.class, false); + select.append(sb.toString()); + String sql = select.toString(); log.debug("sql: " + sql); try { @@ -683,15 +826,29 @@ public ResourceIterator query(DataSource ds) { ps.setFetchSize(1000); ps.setFetchDirection(ResultSet.FETCH_FORWARD); int col = 1; - if (prefix != null) { - String val = prefix + "%"; + if (storageBucket != null) { + String val = storageBucket + "%"; + log.debug("bucket prefix: " + val); + ps.setString(col++, val); + } else if (uriBucket != null) { + String val = uriBucket + "%"; log.debug("bucket prefix: " + val); ps.setString(col++, val); } if (siteID != null) { log.debug("siteID: " + siteID); ps.setObject(col++, siteID); + } + if (namespace != null) { + String val = namespace.getNamespace() + "%"; + log.debug("namespace prefix: " + val); + ps.setString(col++, val); + } + if (minLastModified != null) { + log.debug("min lastModified: " + minLastModified); + ps.setTimestamp(col++, new Timestamp(minLastModified.getTime()), utc); } + ResultSet rs = ps.executeQuery(); return new ArtifactResultSetIterator(con, rs); @@ -704,7 +861,6 @@ public ResourceIterator query(DataSource ds) { private class StorageSiteGet implements EntityGet { private UUID id; - private URI uri; private final boolean forUpdate; public StorageSiteGet(boolean forUpdate) { @@ -731,6 +887,9 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce } else { throw new IllegalStateException("primary key is null"); } + if (forUpdate) { + sb.append(" FOR UPDATE"); + } String sql = sb.toString(); log.debug("StorageSiteGet: " + sql); PreparedStatement prep = conn.prepareStatement(sql); @@ -759,6 +918,228 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce } } + class KeyPairGet implements EntityGet { + private UUID id; + private String name; + private final boolean forUpdate; + + public KeyPairGet(boolean forUpdate) { + this.forUpdate = forUpdate; + } + + @Override + public void setID(UUID id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public PreauthKeyPair execute(JdbcTemplate jdbc) { + return (PreauthKeyPair) jdbc.query(this, new KeyPairExtractor()); + } + + @Override + public PreparedStatement createPreparedStatement(Connection conn) throws SQLException { + StringBuilder sb = getSelectFromSQL(PreauthKeyPair.class, false); + sb.append(" WHERE "); + if (id != null) { + String col = getKeyColumn(PreauthKeyPair.class, true); + sb.append(col).append(" = ?"); + } else if (name != null) { + sb.append("name = ?"); + } else { + throw new IllegalStateException("primary key and 
name are both null"); + } + if (forUpdate) { + sb.append(" FOR UPDATE"); + } + String sql = sb.toString(); + log.debug("KeyPairGet: " + sql); + PreparedStatement prep = conn.prepareStatement(sql); + if (id != null) { + prep.setObject(1, id); + } else { + prep.setString(1, name); + } + return prep; + } + } + + private class KeyPairList implements EntityList { + + @Override + public Set query(JdbcTemplate jdbc) { + List keys = (List) jdbc.query(this, new KeyPairRowMapper()); + Set ret = new TreeSet<>(); + ret.addAll(keys); + return ret; + } + + @Override + public PreparedStatement createPreparedStatement(Connection conn) throws SQLException { + StringBuilder sb = getSelectFromSQL(PreauthKeyPair.class, false); + String sql = sb.toString(); + log.debug("KeyPairList: " + sql); + PreparedStatement prep = conn.prepareStatement(sql); + return prep; + } + } + + public class NodeGet implements EntityGet { + private UUID id; + private ContainerNode parent; + private String name; + private URI storageID; + private final boolean forUpdate; + + public NodeGet(boolean forUpdate) { + this.forUpdate = forUpdate; + } + + @Override + public void setID(UUID id) { + this.id = id; + } + + public void setPath(ContainerNode parent, String name) { + this.parent = parent; + this.name = name; + } + + public void setStorageID(URI storageID) { + this.storageID = storageID; + } + + @Override + public Node execute(JdbcTemplate jdbc) { + return (Node) jdbc.query(this, new NodeExtractor(parent)); + } + + @Override + public PreparedStatement createPreparedStatement(Connection conn) throws SQLException { + StringBuilder sb = getSelectFromSQL(Node.class, false); + sb.append(" WHERE "); + if (id != null) { + String col = getKeyColumn(Node.class, true); + sb.append(col).append(" = ?"); + } else if (storageID != null) { + String col = "storageID"; + sb.append(col).append(" = ?"); + } else if (parent != null && name != null) { + String pidCol = "parentID"; + String nameCol = "name"; + sb.append(pidCol).append(" = ? and ").append(nameCol).append(" = ?"); + } else { + throw new IllegalStateException("primary key is null"); + } + if (forUpdate) { + sb.append(" FOR UPDATE"); + } + String sql = sb.toString(); + log.debug("Node: " + sql); + PreparedStatement prep = conn.prepareStatement(sql); + if (id != null) { + prep.setObject(1, id); + } else if (storageID != null) { + prep.setObject(1, storageID.toASCIIString()); + } else { + prep.setObject(1, parent.getID()); + prep.setObject(2, name); + } + + return prep; + } + } + + public class NodeIteratorQuery implements EntityIteratorQuery { + private ContainerNode parent; + private String start; + private Integer limit; + + public NodeIteratorQuery() { + } + + public void setParent(ContainerNode parent) { + this.parent = parent; + } + + public void setStart(String start) { + this.start = start; + } + + public void setLimit(Integer limit) { + this.limit = limit; + } + + @Override + public ResourceIterator query(DataSource ds) { + if (parent == null) { + throw new RuntimeException("BUG: cannot query for children with parent==null"); + } + + StringBuilder sb = getSelectFromSQL(Node.class, false); + sb.append(" WHERE parentID = ?"); + if (start != null) { + sb.append(" AND ? 
<= name"); + } + sb.append(" ORDER BY name ASC"); + if (limit != null) { + sb.append(" LIMIT ?"); + } + + String sql = sb.toString(); + log.debug("sql: " + sql); + + try { + Connection con = ds.getConnection(); + log.debug("NodeIteratorQuery: setAutoCommit(false)"); + con.setAutoCommit(false); + // defaults for options: ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY + PreparedStatement ps = con.prepareStatement(sql); + ps.setFetchSize(1000); + ps.setFetchDirection(ResultSet.FETCH_FORWARD); + int col = 1; + + ps.setObject(col++, parent.getID()); + log.debug("parentID = " + parent.getID()); + if (start != null) { + ps.setString(col++, start); + log.debug("start = " + start); + } + if (limit != null) { + ps.setInt(col++, limit); + log.debug("limit = " + limit); + } + ResultSet rs = ps.executeQuery(); + + return new NodeResultSetIterator(con, rs, parent); + } catch (SQLException ex) { + throw new RuntimeException("BUG: artifact iterator query failed", ex); + } + + } + } + + private void safeSetBoolean(PreparedStatement prep, int col, Boolean value) throws SQLException { + log.debug("safeSetBoolean: " + col + " " + value); + if (value != null) { + prep.setBoolean(col, value); + } else { + prep.setNull(col, Types.BOOLEAN); + } + } + + private void safeSetString(PreparedStatement prep, int col, URI value) throws SQLException { + String v = null; + if (value != null) { + v = value.toASCIIString(); + } + safeSetString(prep, col, v); + } + private void safeSetString(PreparedStatement prep, int col, String value) throws SQLException { log.debug("safeSetString: " + col + " " + value); if (value != null) { @@ -786,6 +1167,24 @@ private void safeSetTimestamp(PreparedStatement prep, int col, Timestamp value, } } + private void safeSetArray(PreparedStatement prep, int col, Set values) throws SQLException { + + if (values != null && !values.isEmpty()) { + log.debug("safeSetArray: " + col + " " + values.size()); + String[] array1d = new String[values.size()]; + int i = 0; + for (GroupURI u : values) { + array1d[i] = u.getURI().toASCIIString(); + i++; + } + java.sql.Array arr = prep.getConnection().createArrayOf("text", array1d); + prep.setObject(col, arr); + } else { + log.debug("safeSetArray: " + col + " null"); + prep.setNull(col, Types.ARRAY); + } + } + private void safeSetArray(PreparedStatement prep, int col, UUID[] value) throws SQLException { if (value != null) { @@ -798,6 +1197,25 @@ private void safeSetArray(PreparedStatement prep, int col, UUID[] value) throws } } + private void safeSetProps(PreparedStatement prep, int col, Set values) throws SQLException { + + if (values != null && !values.isEmpty()) { + log.debug("safeSetProps: " + col + " " + values.size()); + String[][] array2d = new String[values.size()][2]; // TODO: w-h or h-w?? 
+ int i = 0; + for (NodeProperty np : values) { + array2d[i][0] = np.getKey().toASCIIString(); + array2d[i][1] = np.getValue(); + i++; + } + java.sql.Array arr = prep.getConnection().createArrayOf("text", array2d); + prep.setObject(col, arr); + } else { + log.debug("safeSetProps: " + col + " = null"); + prep.setNull(col, Types.ARRAY); + } + } + private class ArtifactPut implements EntityPut { private final Calendar utc = Calendar.getInstance(DateUtil.UTC); private final boolean update; @@ -852,8 +1270,8 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce safeSetString(prep, col++, value.storageLocation.getStorageID().toASCIIString()); safeSetString(prep, col++, value.storageLocation.storageBucket); } else { - safeSetString(prep, col++, null); // storageLocation.storageID - safeSetString(prep, col++, null); // storageLocation.storageBucket + safeSetString(prep, col++, (URI) null); // storageLocation.storageID + safeSetString(prep, col++, (URI) null); // storageLocation.storageBucket } safeSetTimestamp(prep, col++, new Timestamp(value.getLastModified().getTime()), utc); @@ -948,7 +1366,7 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce if (value.getLocation().storageBucket != null) { safeSetString(prep, col++, value.getLocation().storageBucket); } else { - safeSetString(prep, col++, null); + safeSetString(prep, col++, (String) null); } prep.setTimestamp(col++, new Timestamp(value.getLastModified().getTime()), utc); @@ -1004,6 +1422,11 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce } else { prep.setNull(col++, Types.OTHER); } + if (value.instanceID != null) { + prep.setObject(col++, value.instanceID); + } else { + prep.setNull(col++, Types.OTHER); + } prep.setTimestamp(col++, new Timestamp(value.getLastModified().getTime()), utc); prep.setString(col++, value.getMetaChecksum().toASCIIString()); @@ -1014,6 +1437,149 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce } + private class KeyPairPut implements EntityPut { + private final Calendar utc = Calendar.getInstance(DateUtil.UTC); + private final boolean update; + private PreauthKeyPair value; + + KeyPairPut(boolean update) { + this.update = update; + } + + @Override + public void setValue(PreauthKeyPair value) { + this.value = value; + } + + @Override + public void execute(JdbcTemplate jdbc) { + jdbc.update(this); + } + + @Override + public PreparedStatement createPreparedStatement(Connection conn) throws SQLException { + String sql = null; + if (update) { + sql = getUpdateSQL(PreauthKeyPair.class); + + } else { + sql = getInsertSQL(PreauthKeyPair.class); + } + log.debug("KeyPairPut: " + sql); + PreparedStatement prep = conn.prepareStatement(sql); + int col = 1; + + prep.setString(col++, value.getName()); + prep.setBytes(col++, value.getPublicKey()); + prep.setBytes(col++, value.getPrivateKey()); + + prep.setTimestamp(col++, new Timestamp(value.getLastModified().getTime()), utc); + prep.setString(col++, value.getMetaChecksum().toASCIIString()); + prep.setObject(col++, value.getID()); + + return prep; + } + } + + private class NodePut implements EntityPut { + private final Calendar utc = Calendar.getInstance(DateUtil.UTC); + private final boolean update; + private Node value; + + NodePut(boolean update) { + this.update = update; + } + + @Override + public void setValue(Node value) { + this.value = value; + } + + @Override + public void execute(JdbcTemplate jdbc) { + jdbc.update(this); + } + + @Override + 
public PreparedStatement createPreparedStatement(Connection conn) throws SQLException { + String sql = null; + if (update) { + sql = getUpdateSQL(Node.class); + + } else { + sql = getInsertSQL(Node.class); + } + log.debug("NodePut: " + sql); + PreparedStatement prep = conn.prepareStatement(sql); + int col = 1; + + if (value.parentID == null) { + throw new RuntimeException("BUG: cannot put Node without a parentID: " + value); + } + prep.setObject(col++, value.parentID); + prep.setString(col++, value.getName()); + prep.setString(col++, value.getClass().getSimpleName().substring(0, 1)); // HACK + if (value.ownerID == null) { + throw new RuntimeException("BUG: cannot put Node without an ownerID: " + value); + } + prep.setString(col++, value.ownerID.toString()); + safeSetBoolean(prep, col++, value.isPublic); + safeSetBoolean(prep, col++, value.isLocked); + safeSetArray(prep, col++, value.getReadOnlyGroup()); + safeSetArray(prep, col++, value.getReadWriteGroup()); + safeSetProps(prep, col++, value.getProperties()); + + // ContainerNode-specific fields + if (value instanceof ContainerNode) { + ContainerNode cn = (ContainerNode) value; + safeSetBoolean(prep, col++, cn.inheritPermissions); + } else { + safeSetBoolean(prep, col++, null); + } + + // bytesUsed is in between CN and DN specific columns + if (value instanceof ContainerNode) { + ContainerNode cn = (ContainerNode) value; + safeSetLong(prep, col++, cn.bytesUsed); + } else if (value instanceof DataNode) { + DataNode dn = (DataNode) value; + safeSetLong(prep, col++, dn.bytesUsed); + } else { + safeSetLong(prep, col++, null); + } + + // DataNode specific fields + if (value instanceof DataNode) { + DataNode dn = (DataNode) value; + if (dn.storageID == null) { + throw new RuntimeException("BUG: cannot put DataNode without a storageID: " + value); + } + safeSetBoolean(prep, col++, dn.busy); + safeSetString(prep, col++, dn.storageID); + safeSetString(prep, col++, InventoryUtil.computeBucket(dn.storageID, 5)); // same as Artifact + } else { + safeSetBoolean(prep, col++, null); + safeSetString(prep, col++, (URI) null); + safeSetString(prep, col++, (URI) null); + } + + // LinkNode-specific fields + if (value instanceof LinkNode) { + LinkNode ln = (LinkNode) value; + prep.setString(col++, ln.getTarget().toASCIIString()); + } else { + safeSetString(prep, col++, (URI) null); + } + + // Entity fields + prep.setTimestamp(col++, new Timestamp(value.getLastModified().getTime()), utc); + prep.setString(col++, value.getMetaChecksum().toASCIIString()); + prep.setObject(col++, value.getID()); + + return prep; + } + } + private class EntityEventPut implements EntityPut { private final Calendar utc = Calendar.getInstance(DateUtil.UTC); private final boolean update; @@ -1083,11 +1649,13 @@ public PreparedStatement createPreparedStatement(Connection conn) throws SQLExce } private StringBuilder getSelectFromSQL(Class c, boolean entityCols) { - String tab = tableMap.get(c); - String[] cols = columnMap.get(c); + String tab = getTable(c); + Class targetClass = c; if (entityCols) { - cols = columnMap.get(Entity.class); + targetClass = Entity.class; } + String[] cols = getColumns(targetClass); + if (tab == null || cols == null) { throw new IllegalArgumentException("BUG: no table/columns for class " + c.getName()); } @@ -1106,21 +1674,13 @@ private StringBuilder getSelectFromSQL(Class c, boolean entityCols) { return sb; } - private String getLockSQL(Class c) { - StringBuilder sb = new StringBuilder(); - String pk = getKeyColumn(c, true); - sb.append("UPDATE "); - 
sb.append(tableMap.get(c)); - sb.append(" SET ").append(pk).append(" = ? WHERE ").append(pk).append(" = ?"); - return sb.toString(); - } - private String getUpdateSQL(Class c) { StringBuilder sb = new StringBuilder(); + String tab = getTable(c); sb.append("UPDATE "); - sb.append(tableMap.get(c)); + sb.append(tab); sb.append(" SET "); - String[] cols = columnMap.get(c); + String[] cols = getColumns(c); for (int i = 0; i < cols.length - 1; i++) { // PK is last if (i > 0) { sb.append(","); @@ -1137,10 +1697,11 @@ private String getUpdateSQL(Class c) { private String getInsertSQL(Class c) { StringBuilder sb = new StringBuilder(); + String tab = getTable(c); sb.append("INSERT INTO "); - sb.append(tableMap.get(c)); + sb.append(tab); sb.append(" ("); - String[] cols = columnMap.get(c); + String[] cols = getColumns(c); for (int i = 0; i < cols.length; i++) { if (i > 0) { sb.append(","); @@ -1161,14 +1722,15 @@ private String getInsertSQL(Class c) { private String getDeleteSQL(Class c) { StringBuilder sb = new StringBuilder(); + String tab = getTable(c); sb.append("DELETE FROM "); - sb.append(tableMap.get(c)); + sb.append(tab); sb.append(" WHERE id = ?"); return sb.toString(); } private String getKeyColumn(Class c, boolean pk) { - String[] cols = columnMap.get(c); + String[] cols = getColumns(c); if (cols == null) { throw new IllegalArgumentException("BUG: no table/columns for class " + c.getName()); } @@ -1226,6 +1788,7 @@ private class ArtifactResultSetIterator implements ResourceIterator { private final Connection con; private final ResultSet rs; boolean hasRow; + boolean closeWhenDone = true; // return to pool | assume close suppressed for static connections ArtifactResultSetIterator(Connection con, ResultSet rs) throws SQLException { this.con = con; @@ -1234,7 +1797,14 @@ private class ArtifactResultSetIterator implements ResourceIterator { log.debug("ArtifactResultSetIterator: " + super.toString() + " ctor " + hasRow); if (!hasRow) { log.debug("ArtifactResultSetIterator: " + super.toString() + " ctor - setAutoCommit(true)"); - con.setAutoCommit(true); + try { + con.setAutoCommit(true); // commit txn + if (closeWhenDone) { + con.close(); // return to pool + } + } catch (SQLException unexpected) { + log.error("Connection.setAutoCommit(true) & close() failed", unexpected); + } } } @@ -1243,10 +1813,13 @@ public void close() throws IOException { if (hasRow) { log.debug("ArtifactResultSetIterator: " + super.toString() + " ctor - setAutoCommit(true)"); try { - con.setAutoCommit(true); - hasRow = false; - } catch (SQLException ex) { - throw new RuntimeException("BUG: artifact list query failed during close()", ex); + con.setAutoCommit(true); // commit txn + if (closeWhenDone) { + con.close(); // return to pool + } + hasRow = false; + } catch (SQLException unexpected) { + log.error("Connection.setAutoCommit(true) & close() failed", unexpected); } } } @@ -1263,17 +1836,27 @@ public Artifact next() { hasRow = rs.next(); if (!hasRow) { log.debug("ArtifactResultSetIterator: " + super.toString() + " DONE - setAutoCommit(true)"); - con.setAutoCommit(true); + try { + con.setAutoCommit(true); // commit txn + if (closeWhenDone) { + con.close(); // return to pool + } + } catch (SQLException unexpected) { + log.error("Connection.setAutoCommit(true) & close() failed", unexpected); + } } return ret; } catch (Exception ex) { if (hasRow) { log.debug("ArtifactResultSetIterator: " + super.toString() + " ResultSet.next() FAILED - setAutoCommit(true)"); try { - close(); + con.setAutoCommit(true); // commit txn + if 
(closeWhenDone) { + con.close(); // return to pool + } hasRow = false; - } catch (IOException unexpected) { - log.debug("BUG: unexpected IOException from close", unexpected); + } catch (SQLException unexpected) { + log.error("Connection.setAutoCommit(true) & close() failed", unexpected); } } throw new RuntimeException("BUG: artifact list query failed while iterating", ex); @@ -1281,6 +1864,83 @@ public Artifact next() { } } + private class NodeResultSetIterator implements ResourceIterator { + final Calendar utc = Calendar.getInstance(DateUtil.UTC); + private final Connection con; + private final ResultSet rs; + boolean hasRow; + + ContainerNode parent; + + public NodeResultSetIterator(Connection con, ResultSet rs, ContainerNode parent) throws SQLException { + this.con = con; + this.rs = rs; + this.parent = parent; + hasRow = rs.next(); + log.debug("NodeResultSetIterator: " + super.toString() + " ctor " + hasRow); + if (!hasRow) { + log.debug("NodeResultSetIterator: " + super.toString() + " ctor - setAutoCommit(true)"); + + try { + con.setAutoCommit(true); // commit txn + con.close(); // return to pool + } catch (SQLException ignore) { + log.error("Connection.setAutoCommit(true) & close() failed", ignore); + } + } + } + + @Override + public void close() throws IOException { + if (hasRow) { + log.debug("NodeResultSetIterator: " + super.toString() + " close - setAutoCommit(true)"); + try { + con.setAutoCommit(true); // commit txn + con.close(); // return to pool + hasRow = false; + } catch (SQLException ignore) { + log.error("Connection.setAutoCommit(true) & close() failed", ignore); + } + } + } + + @Override + public boolean hasNext() { + return hasRow; + } + + @Override + public Node next() { + try { + Node ret = mapRowToNode(rs, utc, parent); + hasRow = rs.next(); + if (!hasRow) { + log.debug("NodeResultSetIterator: " + super.toString() + " DONE - setAutoCommit(true)"); + try { + con.setAutoCommit(true); // commit txn + con.close(); // return to pool + hasRow = false; + } catch (SQLException ignore) { + log.error("Connection.setAutoCommit(true) & close() failed", ignore); + } + } + return ret; + } catch (Exception ex) { + if (hasRow) { + log.debug("NodeResultSetIterator: " + super.toString() + " ResultSet.next() FAILED - setAutoCommit(true)"); + try { + con.setAutoCommit(true); // commit txn + con.close(); // return to pool + hasRow = false; + } catch (SQLException ignore) { + log.error("Connection.setAutoCommit(true) & close() failed", ignore); + } + } + throw new RuntimeException("BUG: node list query failed while iterating", ex); + } + } + } + private Artifact mapRowToArtifact(ResultSet rs, Calendar utc) throws SQLException { int col = 1; final URI uri = Util.getURI(rs, col++); @@ -1314,12 +1974,70 @@ private Artifact mapRowToArtifact(ResultSet rs, Calendar utc) throws SQLExceptio return a; } - private class ObsoleteStorageLocationExtractor implements ResultSetExtractor { + private Node mapRowToNode(ResultSet rs, Calendar utc, ContainerNode parent) throws SQLException { + int col = 1; + final UUID parentID = Util.getUUID(rs, col++); + final String name = rs.getString(col++); + final String nodeType = rs.getString(col++); + final String ownerID = rs.getString(col++); + final Boolean isPublic = Util.getBoolean(rs, col++); + final Boolean isLocked = Util.getBoolean(rs, col++); + final String rawROG = rs.getString(col++); + final String rawRWG = rs.getString(col++); + final String rawProps = rs.getString(col++); + final Boolean inheritPermissions = Util.getBoolean(rs, col++); + final Long 
bytesUsed = Util.getLong(rs, col++); + final Boolean busy = Util.getBoolean(rs, col++); + final URI storageID = Util.getURI(rs, col++); + final String storageBucket = rs.getString(col++); + // TODO: return this somehow or just use in DataNode iterator? + final URI linkTarget = Util.getURI(rs, col++); + final Date lastModified = Util.getDate(rs, col++, utc); + final URI metaChecksum = Util.getURI(rs, col++); + final UUID id = Util.getUUID(rs, col++); + + Node ret; + if (nodeType.equals("C")) { + ContainerNode cn = new ContainerNode(id, name); + cn.inheritPermissions = inheritPermissions; + cn.bytesUsed = bytesUsed; + ret = cn; + } else if (nodeType.equals("D")) { + DataNode dn = new DataNode(id, name, storageID); + dn.bytesUsed = bytesUsed; + ret = dn; + } else if (nodeType.equals("L")) { + ret = new LinkNode(id, name, linkTarget); + } else { + throw new RuntimeException("BUG: unexpected node type code: " + nodeType); + } + ret.parentID = parentID; + ret.ownerID = ownerID; + ret.isPublic = isPublic; + ret.isLocked = isLocked; + + if (rawROG != null) { + Util.parseArrayGroupURI(rawROG, ret.getReadOnlyGroup()); + } + if (rawRWG != null) { + Util.parseArrayGroupURI(rawRWG, ret.getReadWriteGroup()); + } + if (rawProps != null) { + Util.parseArrayProps(rawProps, ret.getProperties()); + } + + InventoryUtil.assignLastModified(ret, lastModified); + InventoryUtil.assignMetaChecksum(ret, metaChecksum); + + return ret; + } + + private class ObsoleteStorageLocationExtractor implements ResultSetExtractor { final Calendar utc = Calendar.getInstance(DateUtil.UTC); @Override - public Object extractData(ResultSet rs) throws SQLException, DataAccessException { + public ObsoleteStorageLocation extractData(ResultSet rs) throws SQLException, DataAccessException { if (!rs.next()) { return null; } @@ -1332,7 +2050,7 @@ public Object extractData(ResultSet rs) throws SQLException, DataAccessException StorageLocation s = new StorageLocation(storLoc); s.storageBucket = storBucket; - Entity ret = new ObsoleteStorageLocation(id, s); + ObsoleteStorageLocation ret = new ObsoleteStorageLocation(id, s); InventoryUtil.assignLastModified(ret, lastModified); InventoryUtil.assignMetaChecksum(ret, metaChecksum); return ret; @@ -1352,6 +2070,7 @@ public HarvestState extractData(ResultSet rs) throws SQLException, DataAccessExc final URI resourecID = Util.getURI(rs, col++); final Date curLastModified = Util.getDate(rs, col++, utc); final UUID curID = Util.getUUID(rs, col++); + final UUID instanceID = Util.getUUID(rs, col++); final Date lastModified = Util.getDate(rs, col++, utc); final URI metaChecksum = Util.getURI(rs, col++); @@ -1360,6 +2079,7 @@ public HarvestState extractData(ResultSet rs) throws SQLException, DataAccessExc HarvestState ret = new HarvestState(id, name, resourecID); ret.curLastModified = curLastModified; ret.curID = curID; + ret.instanceID = instanceID; InventoryUtil.assignLastModified(ret, lastModified); InventoryUtil.assignMetaChecksum(ret, metaChecksum); return ret; @@ -1401,6 +2121,40 @@ public StorageSite extractData(ResultSet rs) throws SQLException, DataAccessExce } } + private class KeyPairRowMapper implements RowMapper { + Calendar utc = Calendar.getInstance(DateUtil.UTC); + + @Override + public PreauthKeyPair mapRow(ResultSet rs, int i) throws SQLException { + int col = 1; + final String name = rs.getString(col++); + final byte[] pub = rs.getBytes(col++); + final byte[] priv = rs.getBytes(col++); + + final Date lastModified = Util.getDate(rs, col++, utc); + final URI metaChecksum = Util.getURI(rs, 
col++); + final UUID id = Util.getUUID(rs, col++); + + PreauthKeyPair s = new PreauthKeyPair(id, name, pub, priv); + InventoryUtil.assignLastModified(s, lastModified); + InventoryUtil.assignMetaChecksum(s, metaChecksum); + return s; + } + } + + private class KeyPairExtractor implements ResultSetExtractor { + final Calendar utc = Calendar.getInstance(DateUtil.UTC); + + @Override + public PreauthKeyPair extractData(ResultSet rs) throws SQLException, DataAccessException { + if (!rs.next()) { + return null; + } + KeyPairRowMapper m = new KeyPairRowMapper(); + return m.mapRow(rs, 1); + } + } + private class DeletedArtifactEventExtractor implements ResultSetExtractor { final Calendar utc = Calendar.getInstance(DateUtil.UTC); @@ -1465,4 +2219,22 @@ public StorageLocationEvent extractData(ResultSet rs) throws SQLException, DataA return ret; } } + + private class NodeExtractor implements ResultSetExtractor { + private ContainerNode parent; + final Calendar utc = Calendar.getInstance(DateUtil.UTC); + + public NodeExtractor(ContainerNode parent) { + this.parent = parent; // optional + } + + @Override + public Node extractData(ResultSet rs) throws SQLException, DataAccessException { + if (!rs.next()) { + return null; + } + + return mapRowToNode(rs, utc, parent); + } + } } diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/Util.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/Util.java index 404841ff8..782cb8a6f 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/Util.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/Util.java @@ -77,8 +77,12 @@ import java.sql.SQLException; import java.util.Calendar; import java.util.Date; +import java.util.Set; +import java.util.StringTokenizer; import java.util.UUID; import org.apache.log4j.Logger; +import org.opencadc.gms.GroupURI; +import org.opencadc.vospace.NodeProperty; /** * @@ -378,39 +382,116 @@ public static byte[] getByteArray(ResultSet rs, int col) throw new UnsupportedOperationException("converting " + o.getClass().getName() + " " + o + " to byte[]"); } - /* - * public static int[] getIntArray(ResultSet rs, int col) - * throws SQLException - * { - * Object o = rs.getObject(col); - * return toIntArray(o); - * } - * - * static int[] toIntArray(Object o) - * throws SQLException - * { - * if (o == null) - * return null; - * if (o instanceof Array) - * { - * Array a = (Array) o; - * o = a.getArray(); - * } - * if (o instanceof int[]) - * return (int[]) o; - * if (o instanceof byte[]) - * return CaomUtil.decodeIntArray((byte[]) o); - * if (o instanceof Integer[]) - * { - * Integer[] ia = (Integer[]) o; - * int[] ret = new int[ia.length]; - * for (int i=0; i dest) { + // postgresql 1D array: {a,"b,c"} + if (val == null || val.isEmpty()) { + return; + } + // GroupURI names can contain alphanumeric,comma,dash,dot,underscore,~ + // PG quotes them if comma is present (eg in the group name) + char delim = '"'; + int i = 0; + int j = val.indexOf(delim); + while (j != -1) { + String token = val.substring(i, j); + //log.warn("token: " + i + "," + j + " " + token); + i = j + 1; + j = val.indexOf(delim, i); + + handleToken(token, dest); + } + String token = val.substring(i); + //log.warn("token: " + i + " " + token); + handleToken(token, dest); + } + private static void handleToken(String token, Set dest) { + if (token.startsWith("ivo://")) { + dest.add(new GroupURI(URI.create(token))); + } else { + StringTokenizer st = new StringTokenizer(token, "{,}"); + while (st.hasMoreTokens()) { + String s = 
st.nextToken(); + dest.add(new GroupURI(URI.create(s))); + } + } + } + + // fills the dest set + public static void parseArrayProps(String val, Set dest) { + // postgresql 2D array: {{a,b},{c,d}} + if (val == null || val.isEmpty()) { + return; + } + char open = '{'; + char close = '}'; + char quote = '"'; + int i = val.indexOf(open); + int j = val.lastIndexOf(close); + if (j > i) { + val = val.substring(i + 1, j); + } + i = val.indexOf(open); + j = val.indexOf(close, i + 1); + int k = 0; + while (i != -1 && j != -1 && k++ < 20) { + String t1 = val.substring(i + 1, j); + //log.warn("\tt1: " + i + "," + j + " " + t1); + handleProp(t1, dest); + + if (i != -1 && j > 0) { + i = val.indexOf(open, j); + j = val.indexOf(close, i + 1); + // look ahead for quotes + int q = val.indexOf(quote, i); + //log.warn("i=" + i + " j=" + j + " q=" + q); + if (q != -1 && q < j) { + int cq = val.indexOf(quote, q + 1); + j = val.indexOf(close, cq); + //log.warn("\tcq=" + cq + " j=" + j); + } + } + } + } + + private static void handleProp(String token, Set dest) { + int q = token.indexOf('"'); + int cq = -1; + if (q == -1) { + q = Integer.MAX_VALUE; + } else { + cq = token.indexOf('"', q + 1); + } + int c = token.indexOf(','); + + String key; + int split = c; + if (c < q) { + // key + key = token.substring(0, c); + } else { + // "key" + key = token.substring(q + 1, cq); + split = cq + 1; + } + //log.warn("\tkey: " + key); + + q = token.indexOf('"', split + 1); + cq = -1; + if (q == -1) { + q = Integer.MAX_VALUE; + } else { + cq = token.indexOf('"', q + 1); + } + String val; + if (token.length() < q) { + val = token.substring(split + 1); + } else { + val = token.substring(q + 1, cq); + } + //log.warn("\tval: " + val); + + dest.add(new NodeProperty(URI.create(key), val)); + } } diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/InitDatabase.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/InitDatabaseSI.java similarity index 88% rename from cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/InitDatabase.java rename to cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/InitDatabaseSI.java index 6b299106a..f59d7d767 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/InitDatabase.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/InitDatabaseSI.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2020. (c) 2020. +* (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -67,6 +67,7 @@ package org.opencadc.inventory.db.version; +import ca.nrc.cadc.db.version.InitDatabase; import java.net.URL; import javax.sql.DataSource; import org.apache.log4j.Logger; @@ -75,32 +76,34 @@ * * @author pdowler */ -public class InitDatabase extends ca.nrc.cadc.db.version.InitDatabase { - private static final Logger log = Logger.getLogger(InitDatabase.class); +public class InitDatabaseSI extends InitDatabase { + private static final Logger log = Logger.getLogger(InitDatabaseSI.class); public static final String MODEL_NAME = "storage-inventory"; - public static final String MODEL_VERSION = "0.14"; - public static final String PREV_MODEL_VERSION = "0.10"; + public static final String MODEL_VERSION = "1.0.0"; + public static final String PREV_MODEL_VERSION = "0.14"; //public static final String PREV_MODEL_VERSION = "DO-NOT_UPGRADE-BY-ACCIDENT"; static String[] CREATE_SQL = new String[] { - "inventory.ModelVersion.sql", + "generic.ModelVersion.sql", "inventory.Artifact.sql", "inventory.StorageSite.sql", "inventory.ObsoleteStorageLocation.sql", "inventory.DeletedArtifactEvent.sql", "inventory.DeletedStorageLocationEvent.sql", "inventory.StorageLocationEvent.sql", - "inventory.HarvestState.sql", - "inventory.permissions.sql" + "generic.HarvestState.sql", + "generic.PreauthKeyPair.sql", + "generic.permissions.sql" }; static String[] UPGRADE_SQL = new String[] { - "inventory.StorageLocationEvent.sql", - "inventory.permissions.sql" + "inventory.upgrade-1.0.0.sql", + "generic.PreauthKeyPair.sql", + "generic.permissions.sql" }; - public InitDatabase(DataSource ds, String database, String schema) { + public InitDatabaseSI(DataSource ds, String database, String schema) { super(ds, database, schema, MODEL_NAME, MODEL_VERSION, PREV_MODEL_VERSION); for (String s : CREATE_SQL) { createSQL.add(s); @@ -113,6 +116,6 @@ public InitDatabase(DataSource ds, String database, String schema) { @Override protected URL findSQL(String fname) { // SQL files are stored inside the jar file - return InitDatabase.class.getClassLoader().getResource(fname); + return InitDatabaseSI.class.getClassLoader().getResource(fname); } } diff --git a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/Main.java b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/Main.java index 5eddc5360..4d18205f3 100644 --- a/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/Main.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/inventory/db/version/Main.java @@ -145,7 +145,7 @@ public void run() { DataSource ds = DBUtil.getDataSource(cc); log.info("target: " + server + " " + database + " " + schema); - InitDatabase init = new InitDatabase(ds, database, schema); + InitDatabaseSI init = new InitDatabaseSI(ds, database, schema); boolean result = init.doInit(); if (result) { log.info("init: complete"); diff --git a/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/DataNodeSizeWorker.java b/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/DataNodeSizeWorker.java new file mode 100644 index 000000000..5bfcb30e3 --- /dev/null +++ b/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/DataNodeSizeWorker.java @@ -0,0 +1,216 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES 
ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ + */ + +package org.opencadc.vospace.db; + +import ca.nrc.cadc.date.DateUtil; +import ca.nrc.cadc.db.TransactionManager; +import ca.nrc.cadc.io.ResourceIterator; +import java.io.IOException; +import java.text.DateFormat; +import java.util.Date; +import org.apache.log4j.Logger; +import org.opencadc.inventory.Artifact; +import org.opencadc.inventory.Namespace; +import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.db.HarvestState; +import org.opencadc.inventory.db.HarvestStateDAO; +import org.opencadc.vospace.DataNode; + +/** + * This class performs the work of synchronizing the size of Data Nodes from + * inventory (Artifact) to vopsace (Node). + * + * @author adriand + */ +public class DataNodeSizeWorker implements Runnable { + private static final Logger log = Logger.getLogger(DataNodeSizeWorker.class); + + // lookback when doing incremental harvest because head of sequence is + // not monotonic over short timescales (events arrive out of sequence) + private static final long LOOKBACK_TIME_MS = 60 * 1000L; + + private final HarvestState harvestState; + private final NodeDAO nodeDAO; + private final ArtifactDAO artifactDAO; + private final HarvestStateDAO harvestStateDAO; + private final Namespace storageNamespace; + + private long numArtifactsProcessed; + + /** + * Worker constructor. + * + * @param harvestStateDAO DAO class to persist progress in the vospace database + * @param harvestState current HarvestState instance + * @param artifactDAO DAO class to query for artifacts + * @param namespace artifact namespace + */ + public DataNodeSizeWorker(HarvestStateDAO harvestStateDAO, HarvestState harvestState, + ArtifactDAO artifactDAO, Namespace namespace) { + this.harvestState = harvestState; + this.harvestStateDAO = harvestStateDAO; + this.nodeDAO = new NodeDAO(harvestStateDAO); + this.artifactDAO = artifactDAO; + this.storageNamespace = namespace; + } + + public long getNumArtifactsProcessed() { + return numArtifactsProcessed; + } + + @Override + public void run() { + this.numArtifactsProcessed = 0L; + String opName = DataNodeSizeWorker.class.getSimpleName() + ".artifactQuery"; + DateFormat df = DateUtil.getDateFormat(DateUtil.IVOA_DATE_FORMAT, DateUtil.UTC); + if (harvestState.curLastModified != null) { + log.debug(opName + " source=" + harvestState.getResourceID() + + " instance=" + harvestState.instanceID + + " start=" + df.format(harvestState.curLastModified)); + } else { + log.debug(opName + " source=" + harvestState.getResourceID() + + " instance=" + harvestState.instanceID + + " start=null"); + } + + final Date now = new Date(); + final Date lookBack = new Date(now.getTime() - LOOKBACK_TIME_MS); + Date startTime = getQueryLowerBound(lookBack, harvestState.curLastModified); + if (lookBack != null && harvestState.curLastModified != null) { + log.debug("lookBack=" + df.format(lookBack) + " curLastModified=" + df.format(harvestState.curLastModified) + + " -> " + df.format(startTime)); + } + + String uriBucket = null; // process all artifacts in a single thread + try (final ResourceIterator iter = artifactDAO.iterator(storageNamespace, uriBucket, startTime, true)) { + TransactionManager tm = nodeDAO.getTransactionManager(); + while (iter.hasNext()) { + Artifact artifact = iter.next(); + DataNode node = nodeDAO.getDataNode(artifact.getURI()); + log.debug(artifact.getURI() + " len=" + artifact.getContentLength() + " -> " + node.getName()); + if (node != null && 
!artifact.getContentLength().equals(node.bytesUsed)) { + tm.startTransaction(); + try { + node = (DataNode)nodeDAO.lock(node); + if (node == null) { + continue; // node gone - race condition + } + node.bytesUsed = artifact.getContentLength(); + nodeDAO.put(node); + tm.commitTransaction(); + log.debug("ArtifactSyncWorker.updateDataNode id=" + node.getID() + + " bytesUsed=" + node.bytesUsed + " artifact.lastModified=" + df.format(artifact.getLastModified())); + } catch (Exception ex) { + log.debug("Failed to update data node size for " + node.getName(), ex); + tm.rollbackTransaction(); + throw ex; + } finally { + if (tm.isOpen()) { + log.error("BUG: transaction open in finally. Rolling back..."); + tm.rollbackTransaction(); + log.error("Rollback: OK"); + throw new RuntimeException("BUG: transaction open in finally"); + } + } + } + harvestState.curLastModified = artifact.getLastModified(); + harvestState.curID = artifact.getID(); + harvestStateDAO.put(harvestState); + numArtifactsProcessed++; + } + } catch (IOException ex) { + log.error("Error closing iterator", ex); + throw new RuntimeException("error while closing ResourceIterator", ex); + } + if (harvestState.curLastModified != null) { + log.debug(opName + " source=" + harvestState.getResourceID() + + " instance=" + harvestState.instanceID + + " end=" + df.format(harvestState.curLastModified)); + } else { + log.debug(opName + " source=" + harvestState.getResourceID() + + " instance=" + harvestState.instanceID + + " end=null"); + } + } + + private Date getQueryLowerBound(Date lookBack, Date lastModified) { + if (lookBack == null) { + // feature not enabled + return lastModified; + } + if (lastModified == null) { + // first harvest + return null; + } + if (lookBack.before(lastModified)) { + return lookBack; + } + return lastModified; + + } +} diff --git a/cadc-inventory-server/src/main/java/org/opencadc/inventory/server/InitDatabaseAction.java b/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/InitDatabaseVOS.java similarity index 74% rename from cadc-inventory-server/src/main/java/org/opencadc/inventory/server/InitDatabaseAction.java rename to cadc-inventory-db/src/main/java/org/opencadc/vospace/db/InitDatabaseVOS.java index 6236091a3..40d7f29de 100644 --- a/cadc-inventory-server/src/main/java/org/opencadc/inventory/server/InitDatabaseAction.java +++ b/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/InitDatabaseVOS.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2020. (c) 2020. +* (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -65,52 +65,50 @@ ************************************************************************ */ -package org.opencadc.inventory.server; +package org.opencadc.vospace.db; -import ca.nrc.cadc.db.DBUtil; -import ca.nrc.cadc.rest.InitAction; -import java.util.Map; -import java.util.TreeMap; +import java.net.URL; import javax.sql.DataSource; import org.apache.log4j.Logger; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; /** - * Base class for storage service database initialisation. 
- * + * * @author pdowler */ -public abstract class InitDatabaseAction extends InitAction { - private static final Logger log = Logger.getLogger(InitDatabaseAction.class); +public class InitDatabaseVOS extends ca.nrc.cadc.db.version.InitDatabase { + private static final Logger log = Logger.getLogger(InitDatabaseVOS.class); - protected final Map daoConfig = new TreeMap<>(); + public static final String MODEL_NAME = "storage-vospace"; + public static final String MODEL_VERSION = "1.0.0"; + public static final String PREV_MODEL_VERSION = "n/a"; - protected InitDatabaseAction() { - } - - @Override - public void doInit() { - initDaoConfig(); - initDatabase(); - } + static String[] CREATE_SQL = new String[] { + "generic.ModelVersion.sql", + "vospace.Node.sql", + "vospace.DeletedNodeEvent.sql", + "generic.HarvestState.sql", + "generic.PreauthKeyPair.sql", + "generic.permissions.sql" + }; - /** - * Add content to the (protected) daoConfig map. - */ - protected abstract void initDaoConfig(); + static String[] UPGRADE_SQL = new String[] { + "generic.permissions.sql" + }; - private void initDatabase() { - log.info("initDatabase: START"); - try { - String jndiDataSourceName = (String) daoConfig.get("jndiDataSourceName"); - String database = (String) daoConfig.get("database"); - String schema = (String) daoConfig.get("schema"); - DataSource ds = DBUtil.findJNDIDataSource(jndiDataSourceName); - InitDatabase init = new InitDatabase(ds, database, schema); - init.doInit(); - log.info("initDatabase: " + jndiDataSourceName + " " + schema + " OK"); - } catch (Exception ex) { - throw new IllegalStateException("check/init database failed", ex); + public InitDatabaseVOS(DataSource ds, String database, String schema) { + super(ds, database, schema, MODEL_NAME, MODEL_VERSION, PREV_MODEL_VERSION); + for (String s : CREATE_SQL) { + createSQL.add(s); } + for (String s : UPGRADE_SQL) { + upgradeSQL.add(s); + } + } + + @Override + protected URL findSQL(String fname) { + // SQL files are stored inside the jar file + return InitDatabaseSI.class.getClassLoader().getResource(fname); } } diff --git a/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/NodeDAO.java b/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/NodeDAO.java new file mode 100644 index 000000000..8088c66b7 --- /dev/null +++ b/cadc-inventory-db/src/main/java/org/opencadc/vospace/db/NodeDAO.java @@ -0,0 +1,214 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. 
+* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.vospace.db; + +import ca.nrc.cadc.io.ResourceIterator; +import java.net.URI; +import java.util.UUID; +import org.apache.log4j.Logger; +import org.opencadc.inventory.db.AbstractDAO; +import org.opencadc.inventory.db.HarvestStateDAO; +import org.opencadc.inventory.db.SQLGenerator; +import org.opencadc.vospace.ContainerNode; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.Node; +import org.springframework.jdbc.BadSqlGrammarException; +import org.springframework.jdbc.core.JdbcTemplate; + +/** + * + * @author pdowler + */ +public class NodeDAO extends AbstractDAO { + private static final Logger log = Logger.getLogger(NodeDAO.class); + + public NodeDAO() { + super(true); + } + + public NodeDAO(boolean origin) { + super(origin); + } + + public NodeDAO(HarvestStateDAO harvestStateDAO) { + super(harvestStateDAO); + } + + @Override + public void put(Node val) { + super.put(val); + } + + @Override + public Node lock(Node n) { + if (n == null) { + throw new IllegalArgumentException("entity cannot be null"); + } + // override because Node has subclasses: force base class here + return super.lock(Node.class, n.getID()); + } + + public Node get(UUID id) { + checkInit(); + return super.get(Node.class, id); + } + + public Node get(ContainerNode parent, String name) { + checkInit(); + log.debug("GET: " + parent.getID() + " + " + name); + long t = System.currentTimeMillis(); + + try { + JdbcTemplate jdbc = new JdbcTemplate(dataSource); + SQLGenerator.NodeGet get = (SQLGenerator.NodeGet) gen.getEntityGet(Node.class); + get.setPath(parent, name); + return get.execute(jdbc); + } catch (BadSqlGrammarException ex) { + handleInternalFail(ex); + } finally { + long dt = System.currentTimeMillis() - t; + log.debug("GET: " + parent.getID() + " + " + name + " " + dt + "ms"); + } + throw new RuntimeException("BUG: handleInternalFail did not throw"); + } + + public DataNode getDataNode(URI storageID) { + checkInit(); + log.debug("GET: " + storageID); + long t = System.currentTimeMillis(); + + try { + JdbcTemplate jdbc = new JdbcTemplate(dataSource); + SQLGenerator.NodeGet get = (SQLGenerator.NodeGet) gen.getEntityGet(Node.class); + get.setStorageID(storageID); + return (DataNode) get.execute(jdbc); + } catch (BadSqlGrammarException ex) { + handleInternalFail(ex); + } finally { + long dt = System.currentTimeMillis() - t; + log.debug("GET: " + storageID + " " + dt + "ms"); + } + throw new RuntimeException("BUG: handleInternalFail did not throw"); + } + + public boolean isEmpty(ContainerNode parent) { + checkInit(); + log.debug("isEmpty: " + parent.getID()); + long t = System.currentTimeMillis(); + + try { + JdbcTemplate jdbc = new JdbcTemplate(dataSource); + SQLGenerator.NodeCount count = (SQLGenerator.NodeCount) gen.getNodeCount(); + count.setID(parent.getID()); + int num = count.execute(jdbc); + return (num == 0); + } catch (BadSqlGrammarException ex) { + handleInternalFail(ex); + } finally { + long dt = System.currentTimeMillis() - t; + log.debug("isEmpty: " + parent.getID() + " " + dt + "ms"); + } + throw new RuntimeException("BUG: handleInternalFail did not throw"); + } + + public void delete(UUID id) { + super.delete(Node.class, id); + } + + /** + * Get iterator of child nodes. 
+ * + * @param parent the container node to list + * @param limit max number of nodes to return, or null + * @param start list starting point, or null + * @return iterator of child nodes matching the arguments + */ + public ResourceIterator iterator(ContainerNode parent, Integer limit, String start) { + if (parent == null) { + throw new IllegalArgumentException("childIterator: parent cannot be null"); + } + log.debug("iterator: " + parent.getID()); + + checkInit(); + long t = System.currentTimeMillis(); + + try { + SQLGenerator.NodeIteratorQuery iter = (SQLGenerator.NodeIteratorQuery) gen.getEntityIteratorQuery(Node.class); + iter.setParent(parent); + iter.setStart(start); + iter.setLimit(limit); + return iter.query(dataSource); + } catch (BadSqlGrammarException ex) { + handleInternalFail(ex); + } finally { + long dt = System.currentTimeMillis() - t; + log.debug("iterator: " + parent.getID() + " " + dt + "ms"); + } + throw new RuntimeException("BUG: should be unreachable"); + } +} diff --git a/cadc-inventory-db/src/main/resources/inventory.HarvestState.sql b/cadc-inventory-db/src/main/resources/generic.HarvestState.sql similarity index 94% rename from cadc-inventory-db/src/main/resources/inventory.HarvestState.sql rename to cadc-inventory-db/src/main/resources/generic.HarvestState.sql index bccc28f9f..4a52dfcda 100644 --- a/cadc-inventory-db/src/main/resources/inventory.HarvestState.sql +++ b/cadc-inventory-db/src/main/resources/generic.HarvestState.sql @@ -3,6 +3,7 @@ create table .HarvestState ( resourceID varchar(128), curLastModified timestamp, curID uuid, + instanceID uuid, lastModified timestamp not null, metaChecksum varchar(136) not null, diff --git a/cadc-inventory-db/src/main/resources/inventory.ModelVersion.sql b/cadc-inventory-db/src/main/resources/generic.ModelVersion.sql similarity index 100% rename from cadc-inventory-db/src/main/resources/inventory.ModelVersion.sql rename to cadc-inventory-db/src/main/resources/generic.ModelVersion.sql diff --git a/cadc-inventory-db/src/main/resources/generic.PreauthKeyPair.sql b/cadc-inventory-db/src/main/resources/generic.PreauthKeyPair.sql new file mode 100644 index 000000000..50fc0dcc2 --- /dev/null +++ b/cadc-inventory-db/src/main/resources/generic.PreauthKeyPair.sql @@ -0,0 +1,12 @@ + +create table .PreauthKeyPair ( + name varchar(32) not null, + publicKey bytea not null, + privateKey bytea not null, + + id uuid not null primary key, + lastModified timestamp not null, + metaChecksum varchar(136) not null +); + +create unique index kp_name_index on .PreauthKeyPair(name); diff --git a/cadc-inventory-db/src/main/resources/inventory.permissions.sql b/cadc-inventory-db/src/main/resources/generic.permissions.sql similarity index 100% rename from cadc-inventory-db/src/main/resources/inventory.permissions.sql rename to cadc-inventory-db/src/main/resources/generic.permissions.sql diff --git a/cadc-inventory-db/src/main/resources/inventory.upgrade-1.0.0.sql b/cadc-inventory-db/src/main/resources/inventory.upgrade-1.0.0.sql new file mode 100644 index 000000000..e0ce85860 --- /dev/null +++ b/cadc-inventory-db/src/main/resources/inventory.upgrade-1.0.0.sql @@ -0,0 +1,5 @@ + +-- changes for this version: incomplete + +alter table .HarvestState add column instanceID uuid; + diff --git a/cadc-inventory-db/src/main/resources/vospace.DeletedNodeEvent.sql b/cadc-inventory-db/src/main/resources/vospace.DeletedNodeEvent.sql new file mode 100644 index 000000000..341a670e5 --- /dev/null +++ b/cadc-inventory-db/src/main/resources/vospace.DeletedNodeEvent.sql 
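A minimal paging sketch for the NodeDAO.iterator API added above (illustration only; it assumes a NodeDAO instance that is already configured against its node database, configuration not shown). Since start is an inclusive lower bound on name, each page after the first re-reads the boundary row and skips it:

```java
import ca.nrc.cadc.io.ResourceIterator;
import java.io.IOException;
import org.opencadc.vospace.ContainerNode;
import org.opencadc.vospace.Node;
import org.opencadc.vospace.db.NodeDAO;

public class ChildPager {

    /**
     * List all children of a container in batches of pageSize.
     * Assumes the NodeDAO is already initialized (database config not shown).
     */
    public void listChildren(NodeDAO dao, ContainerNode parent) throws IOException {
        final Integer pageSize = 1000; // must be >= 2 for the skip-duplicate scheme below
        String start = null;           // inclusive lower bound on name; null means start at the beginning
        int found;
        do {
            found = 0;
            try (ResourceIterator<Node> iter = dao.iterator(parent, pageSize, start)) {
                while (iter.hasNext()) {
                    Node n = iter.next();
                    if (start != null && n.getName().equals(start)) {
                        continue; // start is inclusive: skip the row already seen at the end of the previous page
                    }
                    start = n.getName();
                    found++;
                    // process n here
                }
            }
        } while (found > 0);
    }
}
```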
@@ -0,0 +1,17 @@ + +create table .DeletedNodeEvent ( + -- type is immutable + nodeType char(1) not null, + + -- support cleanup of obsolete artifacts + storageID varchar(512), + + lastModified timestamp not null, + metaChecksum varchar(136) not null, + id uuid not null primary key +); + + + +create index dne_lastmodified on .DeletedNodeEvent(lastModified); + diff --git a/cadc-inventory-db/src/main/resources/vospace.Node.sql b/cadc-inventory-db/src/main/resources/vospace.Node.sql new file mode 100644 index 000000000..51e5d6467 --- /dev/null +++ b/cadc-inventory-db/src/main/resources/vospace.Node.sql @@ -0,0 +1,43 @@ + +create table .Node ( + -- require a special root ID value but prevent bugs + parentID uuid not null, + name varchar(512) not null, + nodeType char(1) not null, + + ownerID varchar(256) not null, + isPublic boolean, + isLocked boolean, + readOnlyGroups text[], + readWriteGroups text[], + + -- store misc props in a 2D array + properties text[][], + + -- ContainerNode + inheritPermissions boolean, + + -- DataNode + busy boolean, + bytesUsed bigint, + -- Artifact.uri and Artifact.uriBucket + storageID varchar(512), + storageBucket varchar(5), + + -- LinkNode + target text, + + lastModified timestamp not null, + metaChecksum varchar(136) not null, + id uuid not null primary key +); + +-- usage: vault path navigation +create unique index node_parent_child on .Node(parentID,name); + +-- usage: Node metadata-sync +create index node_lastmodified on .Node(lastModified); + +-- usage: vault incremental Artifact to Node for bytesUsed +-- usage: vault Node vs Artifact validation +create unique index node_storageID on .Node(storageID); \ No newline at end of file diff --git a/cadc-inventory-db/src/test/java/org/opencadc/inventory/db/UtilTest.java b/cadc-inventory-db/src/test/java/org/opencadc/inventory/db/UtilTest.java new file mode 100644 index 000000000..c26cf3f26 --- /dev/null +++ b/cadc-inventory-db/src/test/java/org/opencadc/inventory/db/UtilTest.java @@ -0,0 +1,159 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. 
Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.inventory.db; + +import ca.nrc.cadc.util.Log4jInit; +import java.util.Set; +import java.util.TreeSet; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.junit.Test; +import org.opencadc.gms.GroupURI; +import org.opencadc.vospace.NodeProperty; + +/** + * + * @author pdowler + */ +public class UtilTest { + private static final Logger log = Logger.getLogger(UtilTest.class); + + static { + Log4jInit.setLevel("org.opencadc.inventory.db", Level.INFO); + } + + public UtilTest() { + } + + @Test + public void testParseArrayGroupURI() throws Exception { + + String str = "{ivo://opencadc.org/gms?g3," + + "ivo://opencadc.org/gms?g6-g7," + + "ivo://opencadc.org/gms?g6.g7," + + "ivo://opencadc.org/gms?g6_g7," + + "ivo://opencadc.org/gms?g6~g7}"; + + Set dest = new TreeSet<>(); + Util.parseArrayGroupURI(str, dest); + for (GroupURI u : dest) { + log.info("uri: " + u.getURI()); + } + } + + @Test + public void testParseArrayNodeProperty() throws Exception { + + String str = ""; + Set dest = new TreeSet<>(); + + log.info("raw:\n" + str + "\n"); + Util.parseArrayProps(str, dest); + for (NodeProperty p : dest) { + log.info("prop: " + p.getKey() + " = " + p.getValue()); + } + + str = "{{ivo://ivoa.net/vospace/core#description,stuff}}"; + dest.clear(); + log.info("raw:\n" + str + "\n"); + Util.parseArrayProps(str, dest); + for (NodeProperty p : dest) { + log.info("prop: " + p.getKey() + " = " + p.getValue()); + } + + str = "{{ivo://ivoa.net/vospace/core#description,\"this is the good stuff(tm)\"}}"; + dest.clear(); + log.info("raw:\n" + str + "\n"); + Util.parseArrayProps(str, dest); + for (NodeProperty p : dest) { + log.info("prop: " + p.getKey() + " = " + p.getValue()); + } + + str = "{{ivo://ivoa.net/vospace/core#description,\"this is the good stuff(tm)\"}," + + "{ivo://ivoa.net/vospace/core#type,text/plain}}"; + dest.clear(); + log.info("raw:\n" + str + "\n"); + Util.parseArrayProps(str, dest); + for (NodeProperty p : dest) { + log.info("prop: " + p.getKey() + " = " + p.getValue()); + } + + str = "{{custom:prop,\"spaces in value\"}," + + "{ivo://ivoa.net/vospace/core#length,123}," + + "{ivo://ivoa.net/vospace/core#type,text/plain}," + + "{\"sketchy:a,b\",comma-in-uri}," + + "{sketchy:funny,\"value,with,{delims}\"}}"; + + dest.clear(); + log.info("raw:\n" + str + "\n"); + Util.parseArrayProps(str, dest); + for (NodeProperty p : dest) { + log.info("prop: " + p.getKey() + " = " + p.getValue()); + } + } +} diff --git a/cadc-inventory-server/build.gradle b/cadc-inventory-server/build.gradle index 2c6e23ba6..814e1f972 100644 --- a/cadc-inventory-server/build.gradle +++ b/cadc-inventory-server/build.gradle @@ -12,20 +12,22 @@ repositories { sourceCompatibility = 1.8 group = 'org.opencadc' -version = '0.2.2' +version = '0.3.0' description = 'OpenCADC Storage Inventory server utility library' def git_url = 'https://github.com/opencadc/storage-inventory' dependencies { compile 'org.opencadc:cadc-inventory:[0.7,1.0)' - compile 'org.opencadc:cadc-inventory-db:[0.9,)' + compile 'org.opencadc:cadc-inventory-db:[0.15,)' compile 'org.opencadc:cadc-util:[1.9,2.0)' compile 'org.opencadc:cadc-rest:[1.3.14,)' compile 'org.opencadc:cadc-gms:[1.0.4,)' compile 'org.opencadc:cadc-cdp:[1.3,2.0)' compile 'org.opencadc:cadc-permissions:[0.2,)' compile 'org.opencadc:cadc-permissions-client:[0.3,)' + compile 'org.opencadc:cadc-vos:[2.0,3.0)' + compile 'org.opencadc:cadc-vosi:[1.4.3,2.0)' testCompile 
'junit:junit:[4.0,)' } diff --git a/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/GetKeyAction.java b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/GetKeyAction.java new file mode 100644 index 000000000..d42c13355 --- /dev/null +++ b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/GetKeyAction.java @@ -0,0 +1,125 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.inventory.transfer; + +import ca.nrc.cadc.rest.InlineContentHandler; +import ca.nrc.cadc.rest.RestAction; +import java.io.OutputStream; +import java.io.PrintWriter; +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.naming.NamingException; +import org.apache.log4j.Logger; +import org.opencadc.inventory.PreauthKeyPair; + +/** + * Simple GET action that finds a PreauthKeyPair via JNDI and writes + * the binary public key to the output. + * + * @author pdowler + */ +public class GetKeyAction extends RestAction { + private static final Logger log = Logger.getLogger(GetKeyAction.class); + + public GetKeyAction() { + super(); + } + + @Override + protected InlineContentHandler getInlineContentHandler() { + return null; + } + + @Override + public void doAction() throws Exception { + String jndiPreauthKeys = appName + "-" + PreauthKeyPair.class.getName(); + Context ctx = new InitialContext(); + try { + log.debug("lookup: " + jndiPreauthKeys); + PreauthKeyPair keys = (PreauthKeyPair) ctx.lookup(jndiPreauthKeys); + log.debug("found: " + keys); + byte[] pub = keys.getPublicKey(); + syncOutput.setHeader("content-length", pub.length); + syncOutput.setHeader("content-type", "application/octet-stream"); + syncOutput.setCode(200); + try (OutputStream ostream = syncOutput.getOutputStream()) { + ostream.write(pub); + ostream.flush(); + } + } catch (NamingException ex) { + syncOutput.setHeader("content-type", "test/plain"); + syncOutput.setCode(404); + try (OutputStream ostream = syncOutput.getOutputStream()) { + PrintWriter w = new PrintWriter(ostream); + w.println("not found: key signing disabled"); + w.flush(); + w.close(); + } + } + } +} diff --git a/raven/src/main/java/org/opencadc/raven/ProtocolsGenerator.java b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/ProtocolsGenerator.java similarity index 69% rename from raven/src/main/java/org/opencadc/raven/ProtocolsGenerator.java rename to cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/ProtocolsGenerator.java index 2c4aa7e98..d8fca2f08 100644 --- a/raven/src/main/java/org/opencadc/raven/ProtocolsGenerator.java +++ b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/ProtocolsGenerator.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2023. (c) 2023. +* (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -65,7 +65,7 @@ ************************************************************************ */ -package org.opencadc.raven; +package org.opencadc.inventory.transfer; import ca.nrc.cadc.cred.client.CredUtil; import ca.nrc.cadc.net.HttpGet; @@ -76,10 +76,6 @@ import ca.nrc.cadc.reg.Interface; import ca.nrc.cadc.reg.Standards; import ca.nrc.cadc.reg.client.RegistryClient; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Protocol; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.VOS; import ca.nrc.cadc.vosi.Availability; import java.io.File; import java.io.IOException; @@ -109,6 +105,10 @@ import org.opencadc.permissions.ReadGrant; import org.opencadc.permissions.TokenTool; import org.opencadc.permissions.WriteGrant; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; /** * Class for generating protocol lists corresponding to transfer requests. @@ -119,112 +119,88 @@ public class ProtocolsGenerator { private static final Logger log = Logger.getLogger(ProtocolsGenerator.class); + public static final URI SECURITY_EMBEDDED_TOKEN = URI.create("https://www.opencadc.org/std/storage#embedded-token"); + public static final String ARTIFACT_ID_HDR = "x-artifact-id"; // matches minoc.HeadAction.ARTIFACT_ID_HDR private final ArtifactDAO artifactDAO; private final DeletedArtifactEventDAO deletedArtifactEventDAO; - private final String user; - private final File publicKeyFile; - private final File privateKeyFile; + private final Map siteAvailabilities; private final Map siteRules; - private final StorageResolver storageResolver; - private final boolean preventNotFound; - // for use by FilesAction subclasses + /** + * Optional StorageResolver to resolve Artifact.uri to an external data provider. + */ + public StorageResolver storageResolver; + + /** + * Optional flag to enable prevention of 404 NotFound failure due to eventual + * consistency. Setting this to true will cause the code to make HTTP HEAD + * requests to all known storage sites looking for an artifact that is not + * in the local database. + */ + public boolean preventNotFound = false; + + /** + * Optional user value to put into generated preauth token. + */ + public String user; + + /** + * Optional TokenTool to generate and inject preauth tokens into otherwise anon URL. + */ + public TokenTool tokenGen; + + /** + * Optional restriction so that all anon URLs must have a preauth token. 
+ */ + public boolean requirePreauthAnon = false; + + // for use by FilesAction subclasses to enhance logging boolean storageResolverAdded = false; - - public ProtocolsGenerator(ArtifactDAO artifactDAO, File publicKeyFile, File privateKeyFile, String user, - Map siteAvailabilities, Map siteRules, - boolean preventNotFound, StorageResolver storageResolver) { + public ProtocolsGenerator(ArtifactDAO artifactDAO, Map siteAvailabilities, Map siteRules) { this.artifactDAO = artifactDAO; this.deletedArtifactEventDAO = new DeletedArtifactEventDAO(this.artifactDAO); - this.user = user; - this.publicKeyFile = publicKeyFile; - this.privateKeyFile = privateKeyFile; this.siteAvailabilities = siteAvailabilities; this.siteRules = siteRules; - this.preventNotFound = preventNotFound; - this.storageResolver = storageResolver; + } + + public boolean getStorageResolverAdded() { + return storageResolverAdded; } - List getProtocols(Transfer transfer) throws ResourceNotFoundException, IOException { + public List getProtocols(Transfer transfer) throws ResourceNotFoundException, IOException { + return getProtocols(transfer, null); + } + + public List getProtocols(Transfer transfer, String filenameOverride) throws ResourceNotFoundException, IOException { String authToken = null; URI artifactURI = transfer.getTargets().get(0); // see PostAction line ~127 - if (publicKeyFile != null && privateKeyFile != null) { + if (tokenGen != null) { // create an auth token - TokenTool tk = new TokenTool(publicKeyFile, privateKeyFile); if (transfer.getDirection().equals(Direction.pullFromVoSpace)) { - authToken = tk.generateToken(artifactURI, ReadGrant.class, user); + authToken = tokenGen.generateToken(artifactURI, ReadGrant.class, user); } else { - authToken = tk.generateToken(artifactURI, WriteGrant.class, user); + authToken = tokenGen.generateToken(artifactURI, WriteGrant.class, user); } } List protos = null; if (Direction.pullFromVoSpace.equals(transfer.getDirection())) { - protos = doPullFrom(artifactURI, transfer, authToken); - } else { + // filename override only on GET + protos = doPullFrom(artifactURI, transfer, authToken, filenameOverride); + } else if (Direction.pushToVoSpace.equals(transfer.getDirection())) { protos = doPushTo(artifactURI, transfer, authToken); + } else { + throw new UnsupportedOperationException("unexpected transfer direction: " + transfer.getDirection().getValue()); + } return protos; } - static void prioritizePullFromSites(List storageSites) { - // contains the algorithm for prioritizing storage sites to pull from. 
- - // was: prefer read/write sites to put less load on a read-only "seeder" site during migration - //storageSites.sort((site1, site2) -> Boolean.compare(!site1.getAllowWrite(), !site2.getAllowWrite())); - - // random - Collections.shuffle(storageSites); - } - - Artifact getRemoteArtifact(URL location, URI artifactURI) { - try { - HttpGet head = new HttpGet(location, true); - head.setHeadOnly(true); - head.setReadTimeout(10000); - head.run(); - if (head.getResponseCode() != 200) { - // caught at the end of the method - throw new RuntimeException("Unsuccessful HEAD request: " + head.getResponseCode()); - } - UUID id = UUID.fromString(head.getResponseHeader(ARTIFACT_ID_HDR)); - Artifact result = new - Artifact(id, artifactURI, head.getDigest(), head.getLastModified(), head.getContentLength()); - result.contentType = head.getContentType(); - result.contentEncoding = head.getContentEncoding(); - return result; - } catch (Throwable t) { - log.debug("Could not retrieve artifact " + artifactURI.toASCIIString() + " from " + location, t); - return null; - } - } - - private Capability getFilesCapability(StorageSite storageSite) { - if (!isAvailable(storageSite.getResourceID())) { - log.warn("storage site is offline: " + storageSite.getResourceID()); - return null; - } - Capability filesCap = null; - try { - RegistryClient regClient = new RegistryClient(); - Capabilities caps = regClient.getCapabilities(storageSite.getResourceID()); - filesCap = caps.findCapability(Standards.SI_FILES); - if (filesCap == null) { - log.warn("service: " + storageSite.getResourceID() + " does not provide " + Standards.SI_FILES); - } - } catch (ResourceNotFoundException ex) { - log.warn("storage site not found: " + storageSite.getResourceID()); - } catch (Exception ex) { - log.warn("storage site not responding (capabilities): " + storageSite.getResourceID(), ex); - } - return filesCap; - } - - Artifact getUnsyncedArtifact(URI artifactURI, Transfer transfer, Set storageSites, String authToken) { + public Artifact getUnsyncedArtifact(URI artifactURI, Transfer transfer, Set storageSites, String authToken) { Artifact result = null; for (StorageSite storageSite : storageSites) { // check if site is currently offline @@ -296,86 +272,166 @@ Artifact getUnsyncedArtifact(URI artifactURI, Transfer transfer, Set doPullFrom(URI artifactURI, Transfer transfer, String authToken) throws ResourceNotFoundException, IOException { + // contains the algorithm for prioritizing storage sites to get file + static List prioritizePullFromSites(List storageSites) { + // filter out non-readble + List ret = new ArrayList<>(storageSites.size()); + for (StorageSite s : storageSites) { + if (s.getAllowRead()) { + ret.add(s); + } else { + log.debug("storage site is not readable: " + s.getResourceID()); + } + } + + // random + Collections.shuffle(ret); + return ret; + } + + List doPullFrom(URI artifactURI, Transfer transfer, String authToken, String filenameOverride) + throws ResourceNotFoundException, IOException { StorageSiteDAO storageSiteDAO = new StorageSiteDAO(artifactDAO); Set sites = storageSiteDAO.list(); // this set could be cached List protos = new ArrayList<>(); Artifact artifact = artifactDAO.get(artifactURI); - // produce URLs to each of the copies for each of the protocols - List storageSites = new ArrayList<>(); + if (artifact == null) { if (this.preventNotFound) { log.debug("Artifact " + artifactURI.toASCIIString() + " not found in global. 
Check sites."); artifact = getUnsyncedArtifact(artifactURI, transfer, sites, authToken); } } - + log.debug(artifactURI + " found: " + artifact); + + List storageSites = new ArrayList<>(); if (artifact != null) { - for (SiteLocation site : artifact.siteLocations) { - StorageSite storageSite = getSite(sites, site.getSiteID()); - storageSites.add(storageSite); + if (artifact.storageLocation != null) { + // this is a single storage site + Iterator iter = sites.iterator(); + if (iter.hasNext()) { + storageSites.add(iter.next()); + } + if (iter.hasNext()) { + log.error("BUG: found second StorageSite in database with assigned Artifact.storageLocation"); + } + } else { + // this is a global inventory + for (SiteLocation site : artifact.siteLocations) { + StorageSite storageSite = getSite(sites, site.getSiteID()); + storageSites.add(storageSite); + } } } - - prioritizePullFromSites(storageSites); - for (StorageSite storageSite : storageSites) { + + List readableSites = prioritizePullFromSites(storageSites); + log.debug("pullFrom: known sites " + storageSites.size() + " -> readableSites " + readableSites.size()); + for (StorageSite storageSite : readableSites) { + log.debug("trying site: " + storageSite.getResourceID() + " allowRead=" + storageSite.getAllowRead()); Capability filesCap = getFilesCapability(storageSite); - if (filesCap != null) { + if (filesCap != null && storageSite.getAllowRead()) { for (Protocol proto : transfer.getProtocols()) { - if (storageSite.getAllowRead()) { - // less generic request for service that implements an API - // HACK: this is filesCap specific in here - if (proto.getUri().equals(filesCap.getStandardID().toASCIIString())) { + log.debug("\tprotocol: " + proto); + // less generic request for service that implements an API + // HACK: this is filesCap specific in here + if (proto.getUri().equals(filesCap.getStandardID())) { + Protocol p = new Protocol(proto.getUri()); + p.setEndpoint(storageSite.getResourceID().toASCIIString()); + protos.add(p); + } + URI sec = proto.getSecurityMethod(); + if (sec == null) { + sec = Standards.SECURITY_METHOD_ANON; + } + Interface iface = filesCap.findInterface(sec); + if (iface != null) { + URL baseURL = iface.getAccessURL().getURL(); + log.debug("base url for site " + storageSite.getResourceID() + ": " + baseURL); + if (protocolCompat(proto, baseURL)) { + StringBuilder sb = new StringBuilder(); + sb.append(baseURL.toExternalForm()).append("/"); + if (authToken != null && Standards.SECURITY_METHOD_ANON.equals(sec)) { + sb.append(authToken).append("/"); + } + sb.append(artifactURI.toASCIIString()); + if (filenameOverride != null) { + sb.append(":fo/").append(filenameOverride); + } Protocol p = new Protocol(proto.getUri()); - p.setEndpoint(storageSite.getResourceID().toASCIIString()); + if (transfer.version == VOS.VOSPACE_21) { + p.setSecurityMethod(proto.getSecurityMethod()); + } + p.setEndpoint(sb.toString()); protos.add(p); - } - URI sec = proto.getSecurityMethod(); - if (sec == null) { - sec = Standards.SECURITY_METHOD_ANON; - } - Interface iface = filesCap.findInterface(sec); - if (iface != null) { - URL baseURL = iface.getAccessURL().getURL(); - log.debug("base url for site " + storageSite.getResourceID() + ": " + baseURL); - if (protocolCompat(proto, baseURL)) { - StringBuilder sb = new StringBuilder(); + log.debug("added: " + p); + + // add a plain anon URL + if (authToken != null && !requirePreauthAnon && Standards.SECURITY_METHOD_ANON.equals(sec)) { + sb = new StringBuilder(); sb.append(baseURL.toExternalForm()).append("/"); - 
if (authToken != null && Standards.SECURITY_METHOD_ANON.equals(sec)) { - sb.append(authToken).append("/"); - } sb.append(artifactURI.toASCIIString()); - Protocol p = new Protocol(proto.getUri()); - if (transfer.version == VOS.VOSPACE_21) { - p.setSecurityMethod(proto.getSecurityMethod()); + p = new Protocol(proto.getUri()); + if (filenameOverride != null) { + sb.append(":fo/").append(filenameOverride); } p.setEndpoint(sb.toString()); protos.add(p); log.debug("added: " + p); - - // add a plain anon URL - if (authToken != null && Standards.SECURITY_METHOD_ANON.equals(sec)) { - sb = new StringBuilder(); - sb.append(baseURL.toExternalForm()).append("/"); - sb.append(artifactURI.toASCIIString()); - p = new Protocol(proto.getUri()); - p.setEndpoint(sb.toString()); - protos.add(p); - log.debug("added: " + p); - } - } else { - log.debug("reject protocol: " + proto - + " reason: no compatible URL protocol"); } } else { log.debug("reject protocol: " + proto - + " reason: unsupported security method: " + proto.getSecurityMethod()); + + " reason: no compatible URL protocol"); } } else { - log.debug("Storage not allowed read " + storageSite.getName()); + log.debug("reject protocol: " + proto + + " reason: unsupported security method: " + proto.getSecurityMethod()); } + } } } @@ -399,23 +455,24 @@ List doPullFrom(URI artifactURI, Transfer transfer, String authToken) } } - if (protos.isEmpty() && ((artifact == null) || artifact.siteLocations.size() == 0)) { - // artifact not find internally and has no external resolvers either - // TODO: second condition can currently happen but maybe should not: - // --- when the last siteLocation is removed, the artifact should be deleted (fenwick, ratik) + if (protos.isEmpty()) { + // unable to generate any URLs throw new ResourceNotFoundException("not found: " + artifactURI.toString()); } return protos; } + // the algorithm for prioritizing storage sites to put file static SortedSet prioritizePushToSites(Set storageSites, URI artifactURI, Map siteRules) { PrioritizingStorageSiteComparator comparator = new PrioritizingStorageSiteComparator(siteRules, artifactURI, null); TreeSet orderedSet = new TreeSet<>(comparator); - for (StorageSite storageSite : storageSites) { - if (storageSite.getAllowWrite()) { - orderedSet.add(storageSite); + for (StorageSite s : storageSites) { + if (s.getAllowWrite()) { + orderedSet.add(s); + } else { + log.debug("storage site is not writable: " + s.getResourceID()); } } return orderedSet; @@ -427,16 +484,18 @@ private List doPushTo(URI artifactURI, Transfer transfer, String authT Set storageSites = storageSiteDAO.list(); // this set could be cached List protos = new ArrayList<>(); - SortedSet orderedSites = prioritizePushToSites(storageSites, artifactURI, this.siteRules); + // prioritize also filters out non-writable sites + Set orderedSites = prioritizePushToSites(storageSites, artifactURI, this.siteRules); // produce URLs for all writable sites + log.debug("pushTo: known sites " + storageSites.size() + " -> writableSites " + orderedSites.size()); for (StorageSite storageSite : orderedSites) { // check if site is currently offline if (!isAvailable(storageSite.getResourceID())) { log.warn("storage site is offline: " + storageSite.getResourceID()); continue; } - - //log.warn("PUT: " + storageSite); + + log.debug("pushTo: trying site " + storageSite.getResourceID()); Capability filesCap = null; try { Capabilities caps = regClient.getCapabilities(storageSite.getResourceID()); @@ -449,28 +508,31 @@ private List doPushTo(URI artifactURI, 
Transfer transfer, String authT } if (filesCap != null) { for (Protocol proto : transfer.getProtocols()) { - //log.warn("PUT: " + storageSite + " proto: " + proto); - if (storageSite.getAllowWrite()) { - // less generic request for service that implements - // HACK: this is filesCap specific in here - if (proto.getUri().equals(filesCap.getStandardID().toASCIIString())) { - Protocol p = new Protocol(proto.getUri()); - p.setEndpoint(storageSite.getResourceID().toASCIIString()); - protos.add(p); - } - URI sec = proto.getSecurityMethod(); - if (sec == null) { - sec = Standards.SECURITY_METHOD_ANON; - } - Interface iface = filesCap.findInterface(sec); - log.debug("PUT: " + storageSite + " proto: " + proto + " iface: " + iface); - if (iface != null) { - URL baseURL = iface.getAccessURL().getURL(); - //log.debug("base url for site " + storageSite.getResourceID() + ": " + baseURL); - if (protocolCompat(proto, baseURL)) { + log.debug("pushTo: " + storageSite + " proto: " + proto); + // less generic request for service that implements + // HACK: this is filesCap specific in here + if (proto.getUri().equals(filesCap.getStandardID())) { + Protocol p = new Protocol(proto.getUri()); + p.setEndpoint(storageSite.getResourceID().toASCIIString()); + protos.add(p); + } + URI sec = proto.getSecurityMethod(); + if (sec == null) { + sec = Standards.SECURITY_METHOD_ANON; + } + boolean anon = Standards.SECURITY_METHOD_ANON.equals(sec); + Interface iface = filesCap.findInterface(sec); + log.debug("pushTo: " + storageSite + " proto: " + proto + " iface: " + iface); + if (iface != null) { + URL baseURL = iface.getAccessURL().getURL(); + //log.debug("base url for site " + storageSite.getResourceID() + ": " + baseURL); + if (protocolCompat(proto, baseURL)) { + // // no plain anon URL for put: !anon or anon+token + boolean gen = (!anon || (anon && authToken != null)); + if (gen) { StringBuilder sb = new StringBuilder(); sb.append(baseURL.toExternalForm()).append("/"); - if (proto.getSecurityMethod() == null || Standards.SECURITY_METHOD_ANON.equals(proto.getSecurityMethod())) { + if (authToken != null && anon) { sb.append(authToken).append("/"); } sb.append(artifactURI.toASCIIString()); @@ -481,16 +543,15 @@ private List doPushTo(URI artifactURI, Transfer transfer, String authT p.setEndpoint(sb.toString()); protos.add(p); log.debug("added: " + p); - - // no plain anon URL for put - } else { - log.debug("PUT: " + storageSite + "PUT: reject protocol: " + proto - + " reason: no compatible URL protocol"); } + } else { log.debug("PUT: " + storageSite + "PUT: reject protocol: " + proto - + " reason: unsupported security method: " + proto.getSecurityMethod()); + + " reason: no compatible URL protocol"); } + } else { + log.debug("PUT: " + storageSite + "PUT: reject protocol: " + proto + + " reason: unsupported security method: " + proto.getSecurityMethod()); } } } diff --git a/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/StorageSiteAvailabilityCheck.java b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/StorageSiteAvailabilityCheck.java new file mode 100644 index 000000000..656d2ec00 --- /dev/null +++ b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/StorageSiteAvailabilityCheck.java @@ -0,0 +1,198 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. 
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ + */ + +package org.opencadc.inventory.transfer; + +import ca.nrc.cadc.vosi.Availability; +import ca.nrc.cadc.vosi.AvailabilityClient; +import java.net.URI; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.naming.NamingException; +import org.apache.log4j.Logger; +import org.opencadc.inventory.StorageSite; +import org.opencadc.inventory.db.StorageSiteDAO; + +/** + * Background check of storage site availability. 
This class stores and + * maintains a Map of site availability check results. ProtocolsGenerator + * consults the map when generating URLs to files so it can skip sites that + * are off-line. + * + * @author pdowler + */ +public class StorageSiteAvailabilityCheck implements Runnable { + private static final Logger log = Logger.getLogger(StorageSiteAvailabilityCheck.class); + + static final int AVAILABILITY_CHECK_TIMEOUT = 30; //secs + static final int AVAILABILITY_FULL_CHECK_TIMEOUT = 300; //secs + + private final StorageSiteDAO storageSiteDAO; + private final Map siteStates; + private final Map siteAvailabilities; + + public StorageSiteAvailabilityCheck(StorageSiteDAO storageSiteDAO, String siteAvailabilitiesKey) { + this.storageSiteDAO = storageSiteDAO; + this.siteStates = new HashMap<>(); + this.siteAvailabilities = new HashMap<>(); + + try { + Context initialContext = new InitialContext(); + // check if key already bound, if so unbind + try { + initialContext.unbind(siteAvailabilitiesKey); + } catch (NamingException ignore) { + // ignore + } + initialContext.bind(siteAvailabilitiesKey, this.siteAvailabilities); + } catch (NamingException e) { + throw new IllegalStateException(String.format("unable to bind %s to initial context: %s", + siteAvailabilitiesKey, e.getMessage()), e); + } + } + + @Override + public void run() { + int lastSiteQuerySecs = 0; + while (true) { + Set sites = storageSiteDAO.list(); + if (lastSiteQuerySecs >= AVAILABILITY_FULL_CHECK_TIMEOUT) { + sites = storageSiteDAO.list(); + lastSiteQuerySecs = 0; + } else { + lastSiteQuerySecs += AVAILABILITY_CHECK_TIMEOUT; + } + + for (StorageSite site : sites) { + URI resourceID = site.getResourceID(); + log.debug("checking site: " + resourceID); + SiteState siteState = this.siteStates.get(resourceID); + if (siteState == null) { + siteState = new SiteState(false, 0); + } + boolean minDetail = siteState.isMinDetail(); + Availability availability; + try { + availability = getAvailability(resourceID, minDetail); + } catch (Exception e) { + availability = new Availability(false, e.getMessage()); + log.debug(String.format("failed %s - %s", resourceID, e.getMessage())); + } + final boolean prev = siteState.available; + siteState.available = availability.isAvailable(); + this.siteStates.put(resourceID, siteState); + this.siteAvailabilities.put(resourceID, availability); + String message = String.format("%s %s - %s", minDetail ? "MIN" : "FULL", + resourceID, siteState.available ? 
"UP" : "DOWN"); + if (!siteState.available) { + log.warn(message); + } else if (prev != siteState.available) { + log.info(message); + } else { + log.debug(message); + } + } + + try { + log.debug(String.format("sleep availability checks for %d secs", AVAILABILITY_CHECK_TIMEOUT)); + Thread.sleep(AVAILABILITY_CHECK_TIMEOUT * 1000); + } catch (InterruptedException e) { + throw new IllegalStateException("AvailabilityCheck thread interrupted during sleep"); + } + } + } + + private Availability getAvailability(URI resourceID, boolean minDetail) { + AvailabilityClient client = new AvailabilityClient(resourceID, minDetail); + return client.getAvailability(); + } + + private class SiteState { + + public boolean available; + public int lastFullCheckSecs; + + public SiteState(boolean available, int lastFullCheckSecs) { + this.available = available; + this.lastFullCheckSecs = lastFullCheckSecs; + } + + public boolean isMinDetail() { + log.debug(String.format("isMinDetail() available=%b, lastFullCheckSecs=%d", + available, lastFullCheckSecs)); + if (this.available && this.lastFullCheckSecs < AVAILABILITY_FULL_CHECK_TIMEOUT) { + this.lastFullCheckSecs += AVAILABILITY_CHECK_TIMEOUT; + return true; + } + this.lastFullCheckSecs = 0; + return false; + } + } +} diff --git a/raven/src/main/java/org/opencadc/raven/StorageSiteRule.java b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/StorageSiteRule.java similarity index 97% rename from raven/src/main/java/org/opencadc/raven/StorageSiteRule.java rename to cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/StorageSiteRule.java index 5e2d73146..fbdba6bab 100644 --- a/raven/src/main/java/org/opencadc/raven/StorageSiteRule.java +++ b/cadc-inventory-server/src/main/java/org/opencadc/inventory/transfer/StorageSiteRule.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2021. (c) 2021. + * (c) 2023. (c) 2023. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -67,10 +67,9 @@ ************************************************************************ */ -package org.opencadc.raven; +package org.opencadc.inventory.transfer; import java.util.List; - import org.opencadc.inventory.Namespace; public class StorageSiteRule { diff --git a/raven/src/test/java/org/opencadc/raven/ProtocolsGeneratorTest.java b/cadc-inventory-server/src/test/java/org/opencadc/inventory/transfer/ProtocolsGeneratorTest.java similarity index 95% rename from raven/src/test/java/org/opencadc/raven/ProtocolsGeneratorTest.java rename to cadc-inventory-server/src/test/java/org/opencadc/inventory/transfer/ProtocolsGeneratorTest.java index 0c42f6ddc..190aad14c 100644 --- a/raven/src/test/java/org/opencadc/raven/ProtocolsGeneratorTest.java +++ b/cadc-inventory-server/src/test/java/org/opencadc/inventory/transfer/ProtocolsGeneratorTest.java @@ -66,20 +66,19 @@ */ -package org.opencadc.raven; +package org.opencadc.inventory.transfer; import ca.nrc.cadc.util.Log4jInit; - import java.net.InetAddress; import java.net.URI; import java.util.ArrayList; import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Random; import java.util.SortedSet; import java.util.TreeSet; - import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.junit.Assert; @@ -101,10 +100,16 @@ public void testPrioritizePullFromSites() throws Exception { for (int i = 0; i < 10; i++) { sites.add(new StorageSite(URI.create("ivo://site" + i), "site1" + i, true, rd.nextBoolean())); } - ProtocolsGenerator.prioritizePullFromSites(sites); - for (StorageSite s : sites) { - log.info("found: " + s.getID() + " aka " + s.getResourceID()); - } + List result1 = ProtocolsGenerator.prioritizePullFromSites(sites); + Assert.assertEquals(sites.size(), result1.size()); + Assert.assertTrue(result1.containsAll(sites)); + + List result2 = ProtocolsGenerator.prioritizePullFromSites(sites); + Assert.assertEquals(sites.size(), result2.size()); + Assert.assertTrue(result2.containsAll(sites)); + + // test random order + Assert.assertNotEquals(result1, result2); } @Test diff --git a/cadc-inventory/build.gradle b/cadc-inventory/build.gradle index 1019d3295..cfe772697 100644 --- a/cadc-inventory/build.gradle +++ b/cadc-inventory/build.gradle @@ -14,13 +14,13 @@ sourceCompatibility = 1.8 group = 'org.opencadc' -version = '0.9.4' +version = '1.0.0' description = 'OpenCADC Storage Inventory core library' def git_url = 'https://github.com/opencadc/storage-inventory' dependencies { - compile 'org.opencadc:cadc-util:[1.9.5,2.0)' + compile 'org.opencadc:cadc-util:[1.11.0,2.0)' testCompile 'junit:junit:[4.0,)' } diff --git a/cadc-inventory/src/main/java/org/opencadc/inventory/Entity.java b/cadc-inventory/src/main/java/org/opencadc/inventory/Entity.java index 8cdda73a9..ae7c8b535 100644 --- a/cadc-inventory/src/main/java/org/opencadc/inventory/Entity.java +++ b/cadc-inventory/src/main/java/org/opencadc/inventory/Entity.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2023. (c) 2023. +* (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -75,12 +75,18 @@ * @author pdowler */ public abstract class Entity extends org.opencadc.persist.Entity { - + // Entity metaChecksum algorithm setup: DO NOT CHANGE + private static final boolean ENTITY_TRUNCATE_DATES = false; + // this was the default behaviour when SI was deployed + // operationally and the model is not currently vulnerable + // to "value shifting" bugs so it is still OK + private static final boolean ENTITY_DIGEST_FIELD_NAMES = false; + public Entity() { - super(false); + super(ENTITY_TRUNCATE_DATES, ENTITY_DIGEST_FIELD_NAMES); } public Entity(UUID id) { - super(id, false); + super(id, ENTITY_TRUNCATE_DATES, ENTITY_DIGEST_FIELD_NAMES); } } diff --git a/cadc-inventory/src/main/java/org/opencadc/inventory/InventoryUtil.java b/cadc-inventory/src/main/java/org/opencadc/inventory/InventoryUtil.java index a302179a3..c3c3704ab 100644 --- a/cadc-inventory/src/main/java/org/opencadc/inventory/InventoryUtil.java +++ b/cadc-inventory/src/main/java/org/opencadc/inventory/InventoryUtil.java @@ -68,6 +68,7 @@ package org.opencadc.inventory; import ca.nrc.cadc.util.HexUtil; +import java.io.UnsupportedEncodingException; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; @@ -129,28 +130,18 @@ public static Boolean isRemoteWinner(Artifact local, Artifact remote) { public static String computeBucket(URI uri, int length) { try { MessageDigest md = MessageDigest.getInstance("SHA-1"); - byte[] bytes = new DummyEntity().primitiveValueToBytes(uri, "Artifact.uri", md.getAlgorithm()); + byte[] bytes = uri.toASCIIString().trim().getBytes("UTF-8"); md.update(bytes); byte[] sha = md.digest(); String hex = HexUtil.toHex(sha); return hex.substring(0, length); } catch (NoSuchAlgorithmException ex) { throw new RuntimeException("BUG: failed to get instance of SHA-1", ex); + } catch (UnsupportedEncodingException ex) { + throw new RuntimeException("BUG: failed to encode String in UTF-8", ex); } } - // make primitiveValueToBytes usable in computeBucket above - private static class DummyEntity extends org.opencadc.inventory.Entity { - DummyEntity() { - super(); - } - - @Override - protected byte[] primitiveValueToBytes(Object o, String name, String digestAlg) { - return super.primitiveValueToBytes(o, name, digestAlg); - } - } - /** * Compute the filename of an artifact URI. 
* @param uri The uri to parse @@ -384,6 +375,7 @@ public static void assertValidPathComponent(Class caller, String name, String te boolean slash = (test.indexOf('/') >= 0); boolean escape = (test.indexOf('\\') >= 0); boolean percent = (test.indexOf('%') >= 0); + boolean colon = (test.indexOf(':') >= 0); boolean semic = (test.indexOf(';') >= 0); boolean amp = (test.indexOf('&') >= 0); boolean dollar = (test.indexOf('$') >= 0); @@ -391,14 +383,14 @@ public static void assertValidPathComponent(Class caller, String name, String te boolean sqopen = (test.indexOf('[') >= 0); boolean sqclose = (test.indexOf(']') >= 0); - if (space || slash || escape || percent || semic || amp || dollar || question || sqopen || sqclose) { + if (space || slash || escape || percent || colon || semic || amp || dollar || question || sqopen || sqclose) { String s = "invalid "; if (caller != null) { s += caller.getSimpleName() + "."; } throw new IllegalArgumentException(s + name + ": " + test + " reason: path component may not contain space ( ), slash (/), escape (\\), percent (%)," - + " semi-colon (;), ampersand (&), or dollar ($), question (?), or square brackets ([])"); + + " colon (:), semi-colon (;), ampersand (&), dollar ($), question (?), or square brackets ([])"); } } diff --git a/cadc-inventory/src/test/java/org/opencadc/inventory/EntityTest.java b/cadc-inventory/src/test/java/org/opencadc/inventory/EntityTest.java index 4bd396e50..938e2dbee 100644 --- a/cadc-inventory/src/test/java/org/opencadc/inventory/EntityTest.java +++ b/cadc-inventory/src/test/java/org/opencadc/inventory/EntityTest.java @@ -68,7 +68,12 @@ package org.opencadc.inventory; import ca.nrc.cadc.date.DateUtil; +import ca.nrc.cadc.util.FileUtil; import ca.nrc.cadc.util.Log4jInit; +import java.io.File; +import java.io.FileReader; +import java.io.LineNumberReader; +import java.io.PrintWriter; import java.net.URI; import java.security.MessageDigest; import java.text.DateFormat; @@ -217,6 +222,85 @@ public void testArtifactTransientState() { } } + @Test + public void testStableArtifactChecksum() { + URI uri = URI.create("cadc:FOO/bar"); + URI contentChecksum = URI.create("md5:d41d8cd98f00b204e9800998ecf8427e"); + Date contentLastModified = new Date(); + Long contentLength = 1024L; + + try { + + if (false) { + // generate a sample artifact from current code + Artifact cur = new Artifact(uri, contentChecksum, contentLastModified, contentLength); + cur.contentEncoding = "gzip"; + cur.contentType = "text/plain"; + log.info("created: " + cur); + final URI mcs1 = cur.computeMetaChecksum(MessageDigest.getInstance("MD5")); + + StringBuilder sb = new StringBuilder(); + sb.append("uri\tcontentChecksum\tcontentLastModified\tcontentLength\tcontentEncoding\tcontentType\tid\tmetaChecksum\n"); + sb.append(toTSV(cur, mcs1)); + + File out = new File("build/tmp/sample-artifact.tsv"); + PrintWriter w = new PrintWriter(out); + w.println(sb.toString()); + w.close(); + log.info("new sample artifact: " + out.getPath()); + } + + // check that meta checksum of previous samples is stable + final DateFormat df = DateUtil.getDateFormat(DateUtil.IVOA_DATE_FORMAT, DateUtil.UTC); + for (String fname : new String[] {"sample-artifact.tsv", "sample-iris.tsv" }) { + File in = FileUtil.getFileFromResource(fname, EntityTest.class); + log.info("checking: " + in.getPath()); + + LineNumberReader r = new LineNumberReader(new FileReader(in)); + String line = r.readLine(); + log.info("header: " + line); + line = r.readLine(); + String[] ss = line.split("[\t]"); + log.info("IN:\n" + line); + 
+ URI suri = URI.create(ss[0]); + URI ccs = URI.create(ss[1]); + Date clm = df.parse(ss[2]); + Long clen = Long.parseLong(ss[3]); + String cenc = ss[4]; + String ctype = ss[5]; + UUID id = UUID.fromString(ss[6]); + Artifact actual = new Artifact(id, suri, ccs, clm, clen); + actual.contentEncoding = cenc; + actual.contentType = ctype; + URI metaChecksum = URI.create(ss[7]); + + URI recomp = actual.computeMetaChecksum(MessageDigest.getInstance("MD5")); + log.info("RE:\n" + toTSV(actual, recomp)); + + Assert.assertEquals(in.getName(), metaChecksum, recomp); + } + + } catch (Exception ex) { + log.error("unexpected exception", ex); + Assert.fail("unexpected exception: " + ex); + } + } + + private String toTSV(Artifact cur, URI mcs) { + DateFormat df = DateUtil.getDateFormat(DateUtil.IVOA_DATE_FORMAT, DateUtil.UTC); + StringBuilder sb = new StringBuilder(); + sb.append(cur.getURI().toASCIIString()).append("\t"); + sb.append(cur.getContentChecksum().toASCIIString()).append("\t"); + sb.append(df.format(cur.getContentLastModified())).append("\t"); + sb.append(cur.getContentLength()).append("\t"); + sb.append(cur.contentEncoding).append("\t"); + sb.append(cur.contentType).append("\t"); + sb.append(cur.getID().toString()).append("\t"); + sb.append(mcs.toASCIIString()); + return sb.toString(); + } + @Test public void testDeletedArtifactEvent() { try { diff --git a/cadc-inventory/src/test/resources/dummy b/cadc-inventory/src/test/resources/dummy deleted file mode 100644 index e92bbff87..000000000 --- a/cadc-inventory/src/test/resources/dummy +++ /dev/null @@ -1 +0,0 @@ -dummy file so gradle will make dir build/resources/test on build diff --git a/cadc-inventory/src/test/resources/sample-artifact.tsv b/cadc-inventory/src/test/resources/sample-artifact.tsv new file mode 100644 index 000000000..9b5d9621c --- /dev/null +++ b/cadc-inventory/src/test/resources/sample-artifact.tsv @@ -0,0 +1,2 @@ +uri contentChecksum contentLastModified contentLength contentEncoding contentType id metaChecksum +cadc:FOO/bar md5:d41d8cd98f00b204e9800998ecf8427e 2024-02-14T19:02:31.459 1024 gzip text/plain 9b053ecf-4f3f-45f0-934b-8b32be42b6bf md5:450e68e0536f9c0cf78452382c9db9f4 diff --git a/cadc-inventory/src/test/resources/sample-iris.tsv b/cadc-inventory/src/test/resources/sample-iris.tsv new file mode 100644 index 000000000..72b1feaac --- /dev/null +++ b/cadc-inventory/src/test/resources/sample-iris.tsv @@ -0,0 +1,2 @@ +uri contentChecksum contentLastModified contentLength contentEncoding contentType id metaChecksum +cadc:IRIS/I001B1H0.fits md5:b6ead425ae84289246e4528bbdd7da9a 2006-07-25T16:15:19.000 1008000 application/fits 9b53914f-1465-4440-94fa-2871b8532fca md5:daf96da09f157ff65272118ebb2ec589 diff --git a/critwall/VERSION b/critwall/VERSION index ba99f57c6..d849b8f34 100644 --- a/critwall/VERSION +++ b/critwall/VERSION @@ -1,6 +1,6 @@ ## deployable containers have a semantic and build tag # semantic version tag: major.minor[.patch] # build version tag: timestamp -VER=0.4.5 +VER=1.0.0 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" unset VER diff --git a/critwall/build.gradle b/critwall/build.gradle index 14aba0e89..83716cb15 100644 --- a/critwall/build.gradle +++ b/critwall/build.gradle @@ -21,12 +21,11 @@ mainClassName = 'org.opencadc.critwall.Main' dependencies { compile 'org.opencadc:cadc-storage-adapter:[0.8,1.0)' compile 'org.opencadc:cadc-util:[1.10.2,2.0)' - compile 'org.opencadc:cadc-inventory:[0.9.4,2.0)' - // cadc-inventory-db-0.15 is in the vos2 feature branch - compile 
'org.opencadc:cadc-inventory-db:[0.14.5,0.15.0)' + compile 'org.opencadc:cadc-inventory:[1.0,2.0)' + compile 'org.opencadc:cadc-inventory-db:[1.0,2.0)' compile 'org.opencadc:cadc-registry:[1.7,2.0)' compile 'org.opencadc:cadc-vosi:[1.3.6,2.0)' - compile 'org.opencadc:cadc-vos:[1.2,2.0)' + compile 'org.opencadc:cadc-vos:[2.0,)' runtime 'org.opencadc:cadc-storage-adapter-fs:[0.7,)' runtime 'org.opencadc:cadc-storage-adapter-swift:[0.6,)' diff --git a/critwall/src/intTest/java/org/opencadc/critwall/FileSyncJobTest.java b/critwall/src/intTest/java/org/opencadc/critwall/FileSyncJobTest.java index ed3a8edcb..54ddfeee3 100644 --- a/critwall/src/intTest/java/org/opencadc/critwall/FileSyncJobTest.java +++ b/critwall/src/intTest/java/org/opencadc/critwall/FileSyncJobTest.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2021. (c) 2021. + * (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -133,8 +133,9 @@ public FileSyncJobTest() throws Exception { Map config = new TreeMap(); config.put(SQLGenerator.class.getName(), SQLGenerator.class); config.put("jndiDataSourceName", "jdbc/FileSyncJobTest"); - config.put("database", TestUtil.DATABASE); - config.put("schema", TestUtil.SCHEMA); + //config.put("database", TestUtil.DATABASE); + config.put("invSchema", TestUtil.SCHEMA); + config.put("genSchema", TestUtil.SCHEMA); dao.setConfig(config); String testDir = TEST_ROOT + File.separator + "testValidJob"; diff --git a/critwall/src/intTest/java/org/opencadc/critwall/FileSyncTest.java b/critwall/src/intTest/java/org/opencadc/critwall/FileSyncTest.java index e9a24e095..170a828ac 100644 --- a/critwall/src/intTest/java/org/opencadc/critwall/FileSyncTest.java +++ b/critwall/src/intTest/java/org/opencadc/critwall/FileSyncTest.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2020. (c) 2020. + * (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -133,8 +133,9 @@ public FileSyncTest() throws Exception { daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); daoConfig.put("jndiDataSourceName", "jdbc/FileSyncTest"); - daoConfig.put("database", TestUtil.DATABASE); - daoConfig.put("schema", TestUtil.SCHEMA); + //daoConfig.put("database", TestUtil.DATABASE); + daoConfig.put("invSchema", TestUtil.SCHEMA); + daoConfig.put("genSchema", TestUtil.SCHEMA); dao.setConfig(daoConfig); String testDir = TEST_ROOT + File.separator + "testFileSync"; diff --git a/critwall/src/main/java/org/opencadc/critwall/FileSync.java b/critwall/src/main/java/org/opencadc/critwall/FileSync.java index 797f55571..231564e9c 100644 --- a/critwall/src/main/java/org/opencadc/critwall/FileSync.java +++ b/critwall/src/main/java/org/opencadc/critwall/FileSync.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2020. (c) 2020. +* (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -91,7 +91,7 @@ import org.opencadc.inventory.Artifact; import org.opencadc.inventory.InventoryUtil; import org.opencadc.inventory.db.ArtifactDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.opencadc.inventory.storage.StorageAdapter; @@ -170,9 +170,9 @@ public FileSync(Map daoConfig, ConnectionConfig connectionConfig try { String database = null; // unused (String) daoConfig.get("database"); - String schema = (String) daoConfig.get("schema"); + String schema = (String) daoConfig.get("invSchema"); DataSource ds = ca.nrc.cadc.db.DBUtil.findJNDIDataSource(fileSyncDS); - InitDatabase init = new InitDatabase(ds, database, schema); + InitDatabaseSI init = new InitDatabaseSI(ds, database, schema); init.doInit(); log.info("initDatabase: " + schema + " OK"); } catch (Exception ex) { diff --git a/critwall/src/main/java/org/opencadc/critwall/FileSyncJob.java b/critwall/src/main/java/org/opencadc/critwall/FileSyncJob.java index a83e9381b..20c6c3732 100644 --- a/critwall/src/main/java/org/opencadc/critwall/FileSyncJob.java +++ b/critwall/src/main/java/org/opencadc/critwall/FileSyncJob.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2022. (c) 2022. + * (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -84,13 +84,6 @@ import ca.nrc.cadc.net.TransientException; import ca.nrc.cadc.reg.Standards; import ca.nrc.cadc.reg.client.RegistryClient; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Protocol; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.TransferParsingException; -import ca.nrc.cadc.vos.TransferReader; -import ca.nrc.cadc.vos.TransferWriter; -import ca.nrc.cadc.vos.VOS; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.net.MalformedURLException; @@ -118,6 +111,13 @@ import org.opencadc.inventory.storage.StorageAdapter; import org.opencadc.inventory.storage.StorageEngageException; import org.opencadc.inventory.storage.StorageMetadata; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; +import org.opencadc.vospace.transfer.TransferParsingException; +import org.opencadc.vospace.transfer.TransferReader; +import org.opencadc.vospace.transfer.TransferWriter; /** * Single file sync instance. 
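Across the critwall and fenwick changes above, the DAO configuration replaces the single `schema` key with separate `invSchema` and `genSchema` keys, and database initialization moves from `InitDatabase` to `InitDatabaseSI`. A minimal wiring sketch under those changes; the JNDI name and schema value (`jdbc/inventory`, `inventory`) are placeholders, and the no-argument `ArtifactDAO` constructor is assumed:

```java
import java.util.Map;
import java.util.TreeMap;
import javax.sql.DataSource;
import org.opencadc.inventory.db.ArtifactDAO;
import org.opencadc.inventory.db.SQLGenerator;
import org.opencadc.inventory.db.version.InitDatabaseSI;

public class DaoInitSketch {

    public void init() throws Exception {
        // "schema" is split into "invSchema" (inventory tables) and "genSchema" (generic tables);
        // the values below are placeholders
        Map<String, Object> daoConfig = new TreeMap<>();
        daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class);
        daoConfig.put("jndiDataSourceName", "jdbc/inventory");
        daoConfig.put("invSchema", "inventory");
        daoConfig.put("genSchema", "inventory"); // needed for correct init

        // create/upgrade tables before any DAO use; the database argument is unused (null)
        DataSource ds = ca.nrc.cadc.db.DBUtil.findJNDIDataSource("jdbc/inventory");
        InitDatabaseSI init = new InitDatabaseSI(ds, null, (String) daoConfig.get("invSchema"));
        init.doInit();

        // DAOs then take the same config map (no-arg constructor assumed)
        ArtifactDAO artifactDAO = new ArtifactDAO();
        artifactDAO.setConfig(daoConfig);
    }
}
```

The extra `genSchema` entry mirrors the `// needed for correct init` note added to critwall's Main.java below.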
diff --git a/critwall/src/main/java/org/opencadc/critwall/Main.java b/critwall/src/main/java/org/opencadc/critwall/Main.java index f9af65789..dc4c7a8fd 100644 --- a/critwall/src/main/java/org/opencadc/critwall/Main.java +++ b/critwall/src/main/java/org/opencadc/critwall/Main.java @@ -187,7 +187,8 @@ public static void main(String[] args) { // populate/assign values to pass to FileSync Map daoConfig = new TreeMap<>(); - daoConfig.put("schema", schema); + daoConfig.put("invSchema", schema); + daoConfig.put("genSchema", schema); // needed for correct init try { daoConfig.put(SQLGENERATOR_CONFIG_KEY, Class.forName(generatorName)); diff --git a/fenwick/VERSION b/fenwick/VERSION index fed2d5959..38b1547ae 100644 --- a/fenwick/VERSION +++ b/fenwick/VERSION @@ -4,6 +4,6 @@ # tags with and without build number so operators use the versioned # tag but we always keep a timestamped tag in case a semantic tag gets # replaced accidentally -VER=0.5.7 +VER=1.0.0 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" unset VER diff --git a/fenwick/build.gradle b/fenwick/build.gradle index f61aa5320..f7feee09f 100644 --- a/fenwick/build.gradle +++ b/fenwick/build.gradle @@ -16,9 +16,8 @@ group = 'org.opencadc' dependencies { compile 'org.opencadc:cadc-util:[1.10.2,2.0)' - compile 'org.opencadc:cadc-inventory:[0.9.4,2.0)' - // temporarily limit this lib because cadc-inventory-db-0.15.0 is from the vos2 feature branch - compile 'org.opencadc:cadc-inventory-db:[0.14.5,0.15)' + compile 'org.opencadc:cadc-inventory:[1.0,2.0)' + compile 'org.opencadc:cadc-inventory-db:[1.0,2.0)' compile 'org.opencadc:cadc-inventory-util:[0.1.8,1.0)' compile 'org.opencadc:cadc-registry:[1.5,2.0)' compile 'org.opencadc:cadc-tap:[1.1.14,1.2)' // 1.2 upper bound is correct #reasons diff --git a/fenwick/src/intTest/java/org/opencadc/fenwick/InventoryEnvironment.java b/fenwick/src/intTest/java/org/opencadc/fenwick/InventoryEnvironment.java index 6902637ae..71568adc6 100644 --- a/fenwick/src/intTest/java/org/opencadc/fenwick/InventoryEnvironment.java +++ b/fenwick/src/intTest/java/org/opencadc/fenwick/InventoryEnvironment.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2020. (c) 2020. + * (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -81,7 +81,7 @@ import org.opencadc.inventory.db.SQLGenerator; import org.opencadc.inventory.db.StorageLocationEventDAO; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.springframework.jdbc.core.JdbcTemplate; public class InventoryEnvironment { @@ -102,8 +102,9 @@ public InventoryEnvironment() throws Exception { final DBConfig dbrc = new DBConfig(); connectionConfig = dbrc.getConnectionConfig(TestUtil.INVENTORY_SERVER, TestUtil.INVENTORY_DATABASE); daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); - daoConfig.put("database", TestUtil.INVENTORY_DATABASE); - daoConfig.put("schema", TestUtil.INVENTORY_SCHEMA); + //daoConfig.put("database", TestUtil.INVENTORY_DATABASE); + daoConfig.put("invSchema", TestUtil.INVENTORY_SCHEMA); + daoConfig.put("genSchema", TestUtil.INVENTORY_SCHEMA); // connectionConfig and daoConfig used by InventoryHarvester to create it's own datasource Map testConfig = new TreeMap<>(); @@ -113,8 +114,9 @@ public InventoryEnvironment() throws Exception { testConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); testConfig.put("jndiDataSourceName", jndiPath); - testConfig.put("database", TestUtil.INVENTORY_DATABASE); - testConfig.put("schema", TestUtil.INVENTORY_SCHEMA); + //testConfig.put("database", TestUtil.INVENTORY_DATABASE); + testConfig.put("invSchema", TestUtil.INVENTORY_SCHEMA); + testConfig.put("genSchema", TestUtil.INVENTORY_SCHEMA); storageSiteDAO.setConfig(testConfig); artifactDAO.setConfig(testConfig); @@ -123,9 +125,9 @@ public InventoryEnvironment() throws Exception { deletedStorageLocationEventDAO.setConfig(testConfig); harvestStateDAO.setConfig(testConfig); - new InitDatabase(DBUtil.findJNDIDataSource(jndiPath), + new InitDatabaseSI(DBUtil.findJNDIDataSource(jndiPath), (String) daoConfig.get("database"), - (String) daoConfig.get("schema")).doInit(); + (String) daoConfig.get("invSchema")).doInit(); } void cleanTestEnvironment() throws Exception { diff --git a/fenwick/src/intTest/java/org/opencadc/fenwick/LuskanEnvironment.java b/fenwick/src/intTest/java/org/opencadc/fenwick/LuskanEnvironment.java index ae9f1c25f..ef882cc20 100644 --- a/fenwick/src/intTest/java/org/opencadc/fenwick/LuskanEnvironment.java +++ b/fenwick/src/intTest/java/org/opencadc/fenwick/LuskanEnvironment.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2020. (c) 2020. + * (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -80,7 +80,7 @@ import org.opencadc.inventory.db.SQLGenerator; import org.opencadc.inventory.db.StorageLocationEventDAO; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.springframework.jdbc.core.JdbcTemplate; @@ -106,8 +106,9 @@ public LuskanEnvironment() throws Exception { final Map daoConfig = new TreeMap<>(); daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); daoConfig.put("jndiDataSourceName", jndiPath); - daoConfig.put("database", TestUtil.LUSKAN_DATABASE); - daoConfig.put("schema", TestUtil.LUSKAN_SCHEMA); + //daoConfig.put("database", TestUtil.LUSKAN_DATABASE); + daoConfig.put("invSchema", TestUtil.LUSKAN_SCHEMA); + daoConfig.put("genSchema", TestUtil.LUSKAN_SCHEMA); storageSiteDAO.setConfig(daoConfig); artifactDAO.setConfig(daoConfig); @@ -115,9 +116,9 @@ public LuskanEnvironment() throws Exception { deletedArtifactEventDAO.setConfig(daoConfig); deletedStorageLocationEventDAO.setConfig(daoConfig); - new InitDatabase(DBUtil.findJNDIDataSource(jndiPath), + new InitDatabaseSI(DBUtil.findJNDIDataSource(jndiPath), (String) daoConfig.get("database"), - (String) daoConfig.get("schema")).doInit(); + (String) daoConfig.get("invSchema")).doInit(); } void cleanTestEnvironment() throws Exception { diff --git a/fenwick/src/main/java/org/opencadc/fenwick/InventoryHarvester.java b/fenwick/src/main/java/org/opencadc/fenwick/InventoryHarvester.java index 157c9dba9..fc827b964 100644 --- a/fenwick/src/main/java/org/opencadc/fenwick/InventoryHarvester.java +++ b/fenwick/src/main/java/org/opencadc/fenwick/InventoryHarvester.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2022. (c) 2022. +* (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -86,7 +86,7 @@ import org.opencadc.inventory.StorageSite; import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.opencadc.inventory.util.ArtifactSelector; /** @@ -155,9 +155,9 @@ public InventoryHarvester(Map daoConfig, ConnectionConfig connec this.artifactDAO = new ArtifactDAO(storageSiteDAO); String database = (String) dconf.get("database"); - String schema = (String) dconf.get("schema"); + String schema = (String) dconf.get("invSchema"); DataSource ds = DBUtil.findJNDIDataSource(dsname); - InitDatabase init = new InitDatabase(ds, database, schema); + InitDatabaseSI init = new InitDatabaseSI(ds, database, schema); init.doInit(); log.info("initDatabase: " + schema + " OK"); diff --git a/fenwick/src/main/java/org/opencadc/fenwick/Main.java b/fenwick/src/main/java/org/opencadc/fenwick/Main.java index a1dcfea12..15670ed78 100644 --- a/fenwick/src/main/java/org/opencadc/fenwick/Main.java +++ b/fenwick/src/main/java/org/opencadc/fenwick/Main.java @@ -160,7 +160,8 @@ public static void main(final String[] args) { final String password = props.getFirstPropertyValue(DB_PASSWORD_CONFIG_KEY); final String dbUrl = props.getFirstPropertyValue(DB_URL_CONFIG_KEY); - daoConfig.put("schema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); + daoConfig.put("invSchema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); + daoConfig.put("genSchema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); final ConnectionConfig cc = new ConnectionConfig(null, null, username, password, "org.postgresql.Driver", dbUrl); diff --git a/luskan/README.md b/luskan/README.md index a436ec89f..90f4ccdf9 100644 --- a/luskan/README.md +++ b/luskan/README.md @@ -63,12 +63,9 @@ See cadc-reg ### cadc-tap-tmp.properties Temporary storage of async results is now handled by the -[cadc-tap-tmp](https://github.com/opencadc/tap/tree/master/cadc-tap-tmp) library. This -library should be configured as follows: -``` -org.opencadc.tap.tmp.TempStorageManager.baseURL = https://{server name}/{luskan path}/results -org.opencadc.tap.tmp.TempStorageManager.baseStorageDir = {local directory} -``` +[cadc-tap-tmp](https://github.com/opencadc/tap/tree/master/cadc-tap-tmp) library. `luskan` is configured +internally to use the `DelegatingStorageManager` so the config file must also specify the storage manager +to use. ### luskan.properties ``` @@ -103,6 +100,14 @@ org.opencadc.luskan.uwsRollover = 180 ``` Assuming instances are restarted regularly, this would cause rollover approximately once every 6 months. +### cadc-log.properties (optional) +See cadc-log for common +dynamic logging control. + +### cadc-vosi.properties (optional) +See cadc-vosi for common +service state control. + ### cadcproxy.pem (optional) This client certificate is used to make authenticated server-to-server calls for system-level A&A purposes.
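A minimal `cadc-tap-tmp.properties` sketch for this delegating setup could look like the following; the storage-manager selector key name is an assumption for illustration (confirm the exact key in the cadc-tap-tmp README), while the `TempStorageManager` options are the same implementation-specific settings shown in the previous version of this section:
```
# assumed selector key -- confirm against the cadc-tap-tmp documentation
org.opencadc.tap.tmp.DelegatingStorageManager.storageManager = org.opencadc.tap.tmp.TempStorageManager

# implementation-specific options (unchanged from the earlier TempStorageManager configuration)
org.opencadc.tap.tmp.TempStorageManager.baseURL = https://{server name}/{luskan path}/results
org.opencadc.tap.tmp.TempStorageManager.baseStorageDir = {local directory}
```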
diff --git a/luskan/VERSION b/luskan/VERSION index e894843e4..2b8664584 100644 --- a/luskan/VERSION +++ b/luskan/VERSION @@ -1,6 +1,6 @@ ## deployable containers have a semantic and build tag # semantic version tag: major.minor # build version tag: timestamp -VER=0.6.8 +VER=1.0.0 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" unset VER diff --git a/luskan/src/main/resources/PluginFactory.properties b/luskan/src/main/resources/PluginFactory.properties index 3fca85cde..f8f75cc5a 100644 --- a/luskan/src/main/resources/PluginFactory.properties +++ b/luskan/src/main/resources/PluginFactory.properties @@ -19,10 +19,8 @@ ca.nrc.cadc.tap.db.DatabaseDataType=ca.nrc.cadc.tap.pg.PostgresDataTypeMapper ca.nrc.cadc.tap.writer.format.FormatFactory = org.opencadc.luskan.tap.FormatFactoryImpl -## currently need to pick this at compile -## TODO: modify cadc-tap-server to find this config file at runtime -ca.nrc.cadc.tap.ResultStore = org.opencadc.tap.tmp.TempStorageManager -#ca.nrc.cadc.tap.ResultStore = org.opencadc.tap.tmp.HttpStorageManager +# use cadc-tap-tmp +ca.nrc.cadc.tap.ResultStore = org.opencadc.tap.tmp.DelegatingStorageManager ca.nrc.cadc.tap.schema.TapSchemaDAO = org.opencadc.luskan.tap.TapSchemaDAOImpl diff --git a/minoc/README.md b/minoc/README.md index d4c09685d..99831467e 100644 --- a/minoc/README.md +++ b/minoc/README.md @@ -74,21 +74,43 @@ currently must be "inventory" due to configuration limitations in raven. +The optional _trust.preauth_ key(s) configure `minoc` to trust external service(s) to have performed +authorization checks. Such services may include a signed token in the URL and `minoc` will validate +the request using a public key retrieved from the service instead of performing authorization checks +itself. Currently, only `raven` and `vault` can generate such URLs and provide access to their +public keys. Example: +``` +# trust a SI global inventory +org.opencadc.minoc.trust.preauth = ivo://example.net/raven + +# trust a SI VOSpace service +org.opencadc.minoc.trust.preauth = ivo://example.net/vault +``` +Setting _trust.preauth_ one or more times implicitly sets _readable_ and _writable_ to _true_. + +The optional _readGrantProvider_ and _writeGrantProvider_ keys configure minoc to call other services to +get grants (permissions) for operations. Multiple values of the granting service resourceID(s) may be provided +by including multiple property settings (one per line). All services will be consulted but a single positive +result is sufficient to grant permission for an action. Setting these values implicitly sets _readable_ +and _writable_ to _true_ respectively. -The optional _readGrantProvider_ and _writeGrantProvider_ keys configure minoc to call other services to get grants (permissions) for -operations. Multiple values of the granting service resourceID(s) may be provided by including multiple property -settings (one per line). All services will be consulted but a single positive result is sufficient to grant permission for an -action. +The optional _readable_ and _writable_ keys configure minoc explicitly rather than relying on one or more of +the above trust or grant provider settings. For example, this allows one to configure a read-only minoc +(_writable_ = false) that trusts other services to do the authorization checks. The optional _recoverableNamespace_ key causes `minoc` to configure the storage adapter so that deletions preserve the file content in a recoverable state. 
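As a hedged example, the grant-provider, explicit-status, and recoverable-namespace keys described above might be combined in `minoc.properties` as follows; the resourceIDs and the namespace value are placeholders in the same style as the `ivo://example.net/...` examples earlier in this section:
```
# consult a permissions service (e.g. a baldur instance) for read and write grants
org.opencadc.minoc.readGrantProvider = ivo://example.net/baldur
org.opencadc.minoc.writeGrantProvider = ivo://example.net/baldur

# explicit override: advertise this site as readable but not writable
org.opencadc.minoc.readable = true
org.opencadc.minoc.writable = false

# placeholder namespace: deleted files under this prefix remain recoverable
org.opencadc.minoc.recoverableNamespace = test:KEEP/
```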
This generally means that storage space remains in use @@ -110,7 +132,6 @@ prefixes will be recoverable. Others (e.g. `test:FOO/bar`) will be permanently d Note: Since artifact and stored object deletion can also be performed by the `tantar` file validation tool, all instances of `minoc` and `tantar` that use the same inventory and storage adapter should use the same _recoverableNamespace_ configuration so that preservation and recovery (from mistakes) is consistent. - --- **For developer testing only:** To disable authorization checking (via `readGrantProvider` or `writeGrantProvider` services), add the following configuration entry to minoc.properties: @@ -120,14 +141,13 @@ org.opencadc.minoc.authenticateOnly=true With `authenticateOnly=true`, any authenticated user will be able to read/write/delete files and anonymous users will be able to read files. -### minoc-availability.properties (optional) -The minoc-availability.properties file specifies which users have the authority to change the availability state of the minoc service. Each entry consists of a key=value pair. The key is always "users". The value is the x500 canonical user name. +### cadc-log.properties (optional) +See cadc-log for common +dynamic logging control. -Example: -``` -users = {user identity} -``` -`users` specifies the user(s) who are authorized to make calls to the service. The value is a list of user identities (X500 distingushed name), one line per user. Optional: if the `minoc-availability.properties` is not found or does not list any `users`, the service will function in the default mode (ReadWrite) and the state will not be changeable. +### cadc-vosi.properties (optional) +See cadc-vosi for common +service state control. ### cadcproxy.pem (optional) This client certificate is used to make authenticated server-to-server calls for system-level A&A purposes. @@ -148,15 +168,3 @@ docker run --rm -it minoc:latest /bin/bash docker run --rm --user tomcat:tomcat --volume=/path/to/external/config:/config:ro --name minoc minoc:latest ``` -## using it - -Using `cURL` is possible with Minoc to put a file for testing. - -**Note:** The `content-type` header is important! 
-```bash -$ curl -v -X PUT \ - --header "content-type: application/fits" \ - --data-binary @myfile.fits \ - -E ~/.ssl/cadcproxy.pem \ - https://myhost.com/minoc/files/test:TEST/myfile.fits -``` diff --git a/minoc/VERSION b/minoc/VERSION index a8d5aa3a2..3d4fab565 100644 --- a/minoc/VERSION +++ b/minoc/VERSION @@ -4,6 +4,6 @@ # tags with and without build number so operators use the versioned # tag but we always keep a timestamped tag in case a semantic tag gets # replaced accidentally -VER=0.9.10 +VER=1.0.0 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" unset VER diff --git a/minoc/build.gradle b/minoc/build.gradle index 45705d4f2..add0a28c7 100644 --- a/minoc/build.gradle +++ b/minoc/build.gradle @@ -34,8 +34,8 @@ dependencies { compile 'org.opencadc:cadc-cdp:[1.0,)' compile 'org.opencadc:cadc-data-ops-fits:[0.3.0,)' compile 'org.opencadc:cadc-gms:[1.0.0,)' - compile 'org.opencadc:cadc-inventory:[0.9.4,2.0)' - compile 'org.opencadc:cadc-inventory-db:[0.14.5,0.15)' + compile 'org.opencadc:cadc-inventory:[1.0,2.0)' + compile 'org.opencadc:cadc-inventory-db:[1.0,2.0)' compile 'org.opencadc:cadc-inventory-server:[0.2.1,)' compile 'org.opencadc:cadc-soda-server:[1.2.0,2.0.0)' compile 'org.opencadc:cadc-storage-adapter:[0.11.2,)' diff --git a/minoc/src/intTest/java/org/opencadc/minoc/BasicOpsTest.java b/minoc/src/intTest/java/org/opencadc/minoc/BasicOpsTest.java index d13de5e76..36a6552fd 100644 --- a/minoc/src/intTest/java/org/opencadc/minoc/BasicOpsTest.java +++ b/minoc/src/intTest/java/org/opencadc/minoc/BasicOpsTest.java @@ -146,10 +146,12 @@ public void testPutGetUpdateHeadDelete() { long contentLength = get.getContentLength(); String contentType = get.getContentType(); String contentEncoding = get.getContentEncoding(); + String contentDisposition = get.getResponseHeader("content-disposition"); Assert.assertEquals(computeChecksumURI(data), checksumURI); Assert.assertEquals(data.length, contentLength); Assert.assertEquals(type, contentType); Assert.assertEquals(encoding, contentEncoding); + Assert.assertTrue(contentDisposition.contains("filename=") && contentDisposition.contains("file.txt")); Date lastModified = get.getLastModified(); Assert.assertNotNull(lastModified); @@ -181,10 +183,12 @@ public void testPutGetUpdateHeadDelete() { contentLength = head.getContentLength(); contentType = head.getContentType(); contentEncoding = head.getContentEncoding(); + contentDisposition = head.getResponseHeader("content-disposition"); Assert.assertEquals(computeChecksumURI(data), checksumURI); Assert.assertEquals(data.length, contentLength); Assert.assertEquals(newType, contentType); Assert.assertEquals(newEncoding, contentEncoding); + Assert.assertTrue(contentDisposition.contains("filename=") && contentDisposition.contains("file.txt")); lastModified = head.getLastModified(); Assert.assertNotNull(lastModified); @@ -331,6 +335,114 @@ public void testGetRanges() { } } + @Test + public void testFilenameOverride() { + try { + URI artifactURI = URI.create("cadc:TEST/testFilenameOverride.txt"); + URL artifactURL = new URL(filesURL + "/" + artifactURI.toString()); + + String content = "abcdefghijklmnopqrstuvwxyz"; + String encoding = "test-encoding"; + String type = "text/plain"; + byte[] data = content.getBytes(); + URI expectedChecksum = computeChecksumURI(data); + + // put: no length or checksum + InputStream in = new ByteArrayInputStream(data); + HttpUpload put = new HttpUpload(in, artifactURL); + put.setRequestProperty(HttpTransfer.CONTENT_TYPE, type); + 
put.setRequestProperty(HttpTransfer.CONTENT_ENCODING, encoding); + put.setDigest(expectedChecksum); + + Subject.doAs(userSubject, new RunnableAction(put)); + log.info("put: " + put.getResponseCode() + " " + put.getThrowable()); + log.info("headers: " + put.getResponseHeader("content-length") + " " + put.getResponseHeader("digest")); + Assert.assertNull(put.getThrowable()); + Assert.assertEquals("Created", 201, put.getResponseCode()); + + // head + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + HttpGet head = new HttpGet(artifactURL, bos); + head.setHeadOnly(true); + log.info("head: " + artifactURL.toExternalForm()); + Subject.doAs(userSubject, new RunnableAction(head)); + log.info("head: " + head.getResponseCode() + " " + head.getThrowable()); + log.info("headers: " + head.getResponseHeader("content-length") + " " + head.getResponseHeader("digest")); + log.warn("head output: " + bos.toString()); + Assert.assertNull(head.getThrowable()); + URI checksumURI = head.getDigest(); + long contentLength = head.getContentLength(); + String contentType = head.getContentType(); + String contentEncoding = head.getContentEncoding(); + String contentDisposition = head.getResponseHeader("content-disposition"); + Assert.assertEquals(computeChecksumURI(data), checksumURI); + Assert.assertEquals(data.length, contentLength); + Assert.assertEquals(type, contentType); + Assert.assertEquals(encoding, contentEncoding); + log.info("content-disposition: " + contentDisposition); + Assert.assertTrue(contentDisposition.contains("filename=") && contentDisposition.contains("testFilenameOverride.txt")); + Date lastModified = head.getLastModified(); + Assert.assertNotNull(lastModified); + + URL foURL = new URL(artifactURL.toExternalForm() + ":fo/alternate.txt"); + head = new HttpGet(foURL, bos); + head.setHeadOnly(true); + log.info("head: " + foURL.toExternalForm()); + Subject.doAs(userSubject, new RunnableAction(head)); + log.info("head: " + head.getResponseCode() + " " + head.getThrowable()); + log.info("headers: " + head.getResponseHeader("content-length") + " " + head.getResponseHeader("digest")); + log.warn("head output: " + bos.toString()); + Assert.assertNull(head.getThrowable()); + checksumURI = head.getDigest(); + contentLength = head.getContentLength(); + contentType = head.getContentType(); + contentEncoding = head.getContentEncoding(); + contentDisposition = head.getResponseHeader("content-disposition"); + Assert.assertEquals(computeChecksumURI(data), checksumURI); + Assert.assertEquals(data.length, contentLength); + Assert.assertEquals(type, contentType); + Assert.assertEquals(encoding, contentEncoding); + log.info("content-disposition: " + contentDisposition); + Assert.assertTrue(contentDisposition.contains("filename=") && contentDisposition.contains("alternate.txt")); + Date lastModified2 = head.getLastModified(); + Assert.assertEquals(lastModified, lastModified2); + + // get + bos = new ByteArrayOutputStream(); + log.info("get: " + foURL.toExternalForm()); + HttpGet get = new HttpGet(foURL, bos); + Subject.doAs(userSubject, new RunnableAction(get)); + log.info("get: " + get.getResponseCode() + " " + get.getThrowable()); + log.info("headers: " + get.getResponseHeader("content-length") + " " + get.getResponseHeader("digest")); + log.warn("get output: " + bos.toString()); + Assert.assertNull(get.getThrowable()); + checksumURI = get.getDigest(); + contentLength = get.getContentLength(); + contentType = get.getContentType(); + contentEncoding = get.getContentEncoding(); + contentDisposition = 
get.getResponseHeader("content-disposition"); + Assert.assertEquals(computeChecksumURI(data), checksumURI); + Assert.assertEquals(data.length, contentLength); + Assert.assertEquals(type, contentType); + Assert.assertEquals(encoding, contentEncoding); + log.info("content-disposition: " + contentDisposition); + Assert.assertTrue(contentDisposition.contains("filename=") && contentDisposition.contains("alternate.txt")); + Date lastModified3 = get.getLastModified(); + Assert.assertEquals(lastModified, lastModified3); + + // delete + HttpDelete delete = new HttpDelete(artifactURL, false); + Subject.doAs(userSubject, new RunnableAction(delete)); + log.info("delete: " + delete.getResponseCode() + " " + delete.getThrowable()); + Assert.assertNull(delete.getThrowable()); + Assert.assertEquals("no content", 204, delete.getResponseCode()); + + } catch (Exception t) { + log.error("unexpected throwable", t); + Assert.fail("unexpected throwable: " + t); + } + } + @Test public void testGetNotFound() { try { diff --git a/minoc/src/intTest/java/org/opencadc/minoc/ReplaceArtifactTest.java b/minoc/src/intTest/java/org/opencadc/minoc/ReplaceArtifactTest.java index 2baae3bf0..938898093 100644 --- a/minoc/src/intTest/java/org/opencadc/minoc/ReplaceArtifactTest.java +++ b/minoc/src/intTest/java/org/opencadc/minoc/ReplaceArtifactTest.java @@ -120,7 +120,8 @@ public ReplaceArtifactTest() throws Exception { Map config = new TreeMap<>(); config.put("jndiDataSourceName", "jdbc/minoc"); config.put(SQLGenerator.class.getName(), SQLGenerator.class); - config.put("schema", "inventory"); + config.put("invSchema", "inventory"); + config.put("genSchema", "inventory"); this.dao = new ArtifactDAO(); dao.setConfig(config); diff --git a/minoc/src/main/java/org/opencadc/minoc/ArtifactAction.java b/minoc/src/main/java/org/opencadc/minoc/ArtifactAction.java index 591f0bf1c..7e51264cf 100644 --- a/minoc/src/main/java/org/opencadc/minoc/ArtifactAction.java +++ b/minoc/src/main/java/org/opencadc/minoc/ArtifactAction.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2022. (c) 2022. +* (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -76,27 +76,17 @@ import ca.nrc.cadc.rest.RestAction; import ca.nrc.cadc.rest.SyncInput; import ca.nrc.cadc.rest.Version; -import ca.nrc.cadc.util.MultiValuedProperties; -import ca.nrc.cadc.util.PropertiesReader; -import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.security.AccessControlException; import java.security.cert.CertificateException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.Set; import javax.security.auth.Subject; import org.apache.log4j.Logger; import org.opencadc.inventory.Artifact; import org.opencadc.inventory.InventoryUtil; -import org.opencadc.inventory.Namespace; import org.opencadc.inventory.db.ArtifactDAO; -import org.opencadc.inventory.db.SQLGenerator; import org.opencadc.inventory.storage.StorageAdapter; import org.opencadc.permissions.Grant; import org.opencadc.permissions.ReadGrant; @@ -126,6 +116,11 @@ public abstract class ArtifactAction extends RestAction { // The target artifact URI artifactURI; + String errMsg; + + // alternmate filename for content-disposition header, usually null + boolean extractFilenameOverride = false; + String filenameOverride; // The (possibly null) authentication token. String authToken; @@ -133,76 +128,28 @@ public abstract class ArtifactAction extends RestAction { // servlet path minus the auth token String loggablePath; - // immutable state set in constructor - protected final MultiValuedProperties config; - protected final File publicKey; - protected final List readGrantServices = new ArrayList<>(); - protected final List writeGrantServices = new ArrayList<>(); + protected MinocConfig config; // lazy init protected ArtifactDAO artifactDAO; protected StorageAdapter storageAdapter; - private final boolean authenticateOnly; - // constructor for unit tests with no config/init ArtifactAction(boolean init) { super(); this.config = null; this.artifactDAO = null; this.storageAdapter = null; - this.authenticateOnly = false; - this.publicKey = null; } protected ArtifactAction() { super(); - this.config = MinocInitAction.getConfig(); - - List readGrants = config.getProperty(MinocInitAction.READ_GRANTS_KEY); - if (readGrants != null) { - for (String s : readGrants) { - try { - URI u = new URI(s); - readGrantServices.add(u); - } catch (URISyntaxException ex) { - throw new IllegalStateException("invalid config: " + MinocInitAction.READ_GRANTS_KEY + "=" + s + " INVALID", ex); - } - } - } - - List writeGrants = config.getProperty(MinocInitAction.WRITE_GRANTS_KEY); - if (writeGrants != null) { - for (String s : writeGrants) { - try { - URI u = new URI(s); - writeGrantServices.add(u); - } catch (URISyntaxException ex) { - throw new IllegalStateException("invalid config: " + MinocInitAction.WRITE_GRANTS_KEY + "=" + s + " INVALID", ex); - } - } - } - - String ao = config.getFirstPropertyValue(MinocInitAction.DEV_AUTH_ONLY_KEY); - if (ao != null) { - try { - this.authenticateOnly = Boolean.valueOf(ao); - if (authenticateOnly) { - log.warn("(configuration) authenticateOnly = " + authenticateOnly); - } - } catch (Exception ex) { - throw new IllegalStateException("invalid config: " + MinocInitAction.DEV_AUTH_ONLY_KEY + "=" + ao + " must be true|false or not set"); - } - } else { - authenticateOnly = false; - } + } - String 
pubkeyFileName = config.getFirstPropertyValue(MinocInitAction.PUBKEYFILE_KEY); - if (pubkeyFileName != null) { - this.publicKey = new File(System.getProperty("user.home") + "/config/" + pubkeyFileName); - } else { - this.publicKey = null; // no pre-auth - } + @Override + public void initAction() throws Exception { + super.initAction(); + this.config = MinocInitAction.getConfig(appName); } @Override @@ -258,32 +205,45 @@ protected void initAndAuthorize(Class grantClass, boolean allow // do authorization (with token or subject) Subject subject = AuthenticationUtil.getCurrentSubject(); if (authToken != null) { - if (publicKey == null) { - throw new IllegalArgumentException("unexpected pre-auth token in URL"); - } - TokenTool tk = new TokenTool(publicKey); - String tokenUser; - if (allowReadWithWriteGrant && ReadGrant.class.isAssignableFrom(grantClass)) { - // treat a WriteGrant as also granting read permission - tokenUser = tk.validateToken(authToken, artifactURI, grantClass, WriteGrant.class); - } else { - tokenUser = tk.validateToken(authToken, artifactURI, grantClass); - } - subject.getPrincipals().clear(); - if (tokenUser != null) { - subject.getPrincipals().add(new HttpPrincipal(tokenUser)); - } - logInfo.setSubject(subject); - logInfo.setResource(artifactURI); - logInfo.setPath(syncInput.getContextPath() + syncInput.getComponentPath()); - if (ReadGrant.class.isAssignableFrom(grantClass)) { - logInfo.setGrant("read: preauth-token"); - } else if (WriteGrant.class.isAssignableFrom(grantClass)) { - logInfo.setGrant("write: preauth-token"); - } else { - throw new IllegalStateException("Unsupported grant class: " + grantClass); + Map trusted = config.getTrustedServices(); + log.debug("trusted services: " + trusted.size()); + for (Map.Entry me : trusted.entrySet()) { + if (me.getValue() != null) { + TokenTool tk = new TokenTool(me.getValue()); + log.debug("validate preauth with key from " + me.getKey()); + try { + String tokenUser; + if (allowReadWithWriteGrant && ReadGrant.class.isAssignableFrom(grantClass)) { + // treat a WriteGrant as also granting read permission + tokenUser = tk.validateToken(authToken, artifactURI, grantClass, WriteGrant.class); + } else { + tokenUser = tk.validateToken(authToken, artifactURI, grantClass); + } + subject.getPrincipals().clear(); + if (tokenUser != null) { + subject.getPrincipals().add(new HttpPrincipal(tokenUser)); + } + logInfo.setSubject(subject); + logInfo.setResource(artifactURI); + logInfo.setPath(syncInput.getContextPath() + syncInput.getComponentPath()); + if (ReadGrant.class.isAssignableFrom(grantClass)) { + logInfo.setGrant("read:preauth-token:" + me.getKey()); + } else if (WriteGrant.class.isAssignableFrom(grantClass)) { + logInfo.setGrant("write:preauth-token:" + me.getKey()); + } else { + throw new IllegalStateException("Unsupported grant class: " + grantClass); + } + // granted + return; + } catch (AccessControlException ex) { + log.debug("token invalid vs keys from " + me.getKey()); + } + } else { + log.warn("no keys from " + me.getKey() + " -- SKIP"); + } } - return; + // no return from inside check + throw new AccessControlException("invalid auth token"); } // augment subject (minoc is configured so augment is not done in rest library) @@ -291,13 +251,13 @@ protected void initAndAuthorize(Class grantClass, boolean allow logInfo.setSubject(subject); logInfo.setResource(artifactURI); logInfo.setPath(syncInput.getContextPath() + syncInput.getComponentPath()); - PermissionsCheck permissionsCheck = new PermissionsCheck(artifactURI, 
authenticateOnly, logInfo); + PermissionsCheck permissionsCheck = new PermissionsCheck(artifactURI, config.isAuthenticateOnly(), logInfo); // TODO: allowReadWithWriteGrant could be implemented here, but grant services are probably configured // that way already so it's complexity that probably won't allow/enable any actions if (ReadGrant.class.isAssignableFrom(grantClass)) { - permissionsCheck.checkReadPermission(readGrantServices); + permissionsCheck.checkReadPermission(config.getReadGrantServices()); } else if (WriteGrant.class.isAssignableFrom(grantClass)) { - permissionsCheck.checkWritePermission(writeGrantServices); + permissionsCheck.checkWritePermission(config.getWriteGrantServices()); } else { throw new IllegalStateException("Unsupported grant class: " + grantClass); } @@ -305,13 +265,17 @@ protected void initAndAuthorize(Class grantClass, boolean allow void init() { if (this.artifactURI == null) { + if (errMsg != null) { + throw new IllegalArgumentException(errMsg); + } + // generic throw new IllegalArgumentException("missing or invalid artifact URI"); } } protected void initDAO() { if (artifactDAO == null) { - Map configMap = MinocInitAction.getDaoConfig(config); + Map configMap = config.getDaoConfig(); this.artifactDAO = new ArtifactDAO(); artifactDAO.setConfig(configMap); // connectivity tested } @@ -319,12 +283,10 @@ protected void initDAO() { protected void initStorageAdapter() { if (storageAdapter == null) { - this.storageAdapter = InventoryUtil.loadPlugin(config.getFirstPropertyValue(MinocInitAction.SA_KEY)); - List rec = MinocInitAction.getRecoverableNamespaces(config); - storageAdapter.setRecoverableNamespaces(rec); + this.storageAdapter = config.getStorageAdapter(); } } - + /** * Parse the request path. */ @@ -332,16 +294,29 @@ void parsePath() { String path = this.syncInput.getPath(); log.debug("path: " + path); if (path != null) { - int colonIndex = path.indexOf(":"); - int firstSlashIndex = path.indexOf("/"); - if (colonIndex != -1) { - if (firstSlashIndex < 0 || firstSlashIndex > colonIndex) { - // no auth token--artifact URI is complete path - this.artifactURI = createArtifactURI(path); - } else { - this.artifactURI = createArtifactURI(path.substring(firstSlashIndex + 1)); - this.authToken = path.substring(0, firstSlashIndex); - log.debug("authToken: " + this.authToken); + int colon1 = path.indexOf(":"); + int slash1 = path.indexOf("/"); + if (colon1 != -1) { + if (slash1 >= 0 && slash1 < colon1) { + // auth token in front + this.authToken = path.substring(0, slash1); + path = path.substring(slash1 + 1); + } + try { + int foi = path.indexOf(":fo/"); + if (foi > 0 && extractFilenameOverride) { + // filename override appended + this.filenameOverride = path.substring(foi + 4); + path = path.substring(0, foi); + } else if (foi > 0) { + throw new IllegalArgumentException("detected misuse of :fo/ filename override"); + } + URI auri = new URI(path); + InventoryUtil.validateArtifactURI(ArtifactAction.class, auri); + this.artifactURI = auri; + } catch (URISyntaxException | IllegalArgumentException e) { + this.errMsg = "illegal artifact URI: " + path + " reason: " + e.getMessage(); + log.debug(errMsg, e); } } } @@ -354,85 +329,4 @@ Artifact getArtifact(URI artifactURI) throws ResourceNotFoundException { } return artifact; } - - /** - * Create a valid artifact uri. - * @param uri The input string. - * @return The artifact uri object. 
- */ - private URI createArtifactURI(String uri) { - log.debug("artifact URI: " + uri); - URI ret; - try { - ret = new URI(uri); - InventoryUtil.validateArtifactURI(ArtifactAction.class, ret); - } catch (URISyntaxException | IllegalArgumentException e) { - ret = null; - log.debug("illegal artifact URI: " + uri, e); - } - return ret; - } - - protected List getReadGrantServices(MultiValuedProperties props) { - String key = ReadGrant.class.getName() + ".resourceID"; - List values = props.getProperty(key); - if (values == null) { - return Collections.emptyList(); - } - return values; - } - - protected List getWriteGrantServices(MultiValuedProperties props) { - String key = WriteGrant.class.getName() + ".resourceID"; - List values = props.getProperty(key); - if (values == null) { - return Collections.emptyList(); - } - return values; - } - - static MultiValuedProperties readConfig() { - PropertiesReader pr = new PropertiesReader("minoc.properties"); - MultiValuedProperties props = pr.getAllProperties(); - - if (log.isDebugEnabled()) { - log.debug("minoc.properties:"); - Set keys = props.keySet(); - for (String key : keys) { - log.debug(" " + key + " = " + props.getProperty(key)); - } - } - return props; - } - - static Map getDaoConfig(MultiValuedProperties props) { - Map config = new HashMap(); - Class cls = null; - List sqlGenList = props.getProperty(MinocInitAction.SQLGEN_KEY); - if (sqlGenList != null && sqlGenList.size() > 0) { - try { - String sqlGenClass = sqlGenList.get(0); - cls = Class.forName(sqlGenClass); - } catch (ClassNotFoundException e) { - throw new IllegalStateException( - "could not load SQLGenerator class: " + e.getMessage(), e); - } - } else { - // use the default SQL generator - cls = SQLGenerator.class; - } - - config.put(MinocInitAction.SQLGEN_KEY, cls); - config.put("jndiDataSourceName", MinocInitAction.JNDI_DATASOURCE); - List schemaList = props.getProperty(MinocInitAction.SCHEMA_KEY); - if (schemaList == null || schemaList.size() < 1) { - throw new IllegalStateException("a value for " + MinocInitAction.SCHEMA_KEY - + " is needed in minoc.properties"); - } - config.put("schema", schemaList.get(0)); - config.put("database", null); - - return config; - } - } diff --git a/minoc/src/main/java/org/opencadc/minoc/DeleteAction.java b/minoc/src/main/java/org/opencadc/minoc/DeleteAction.java index 2ce41e46e..28f18d810 100644 --- a/minoc/src/main/java/org/opencadc/minoc/DeleteAction.java +++ b/minoc/src/main/java/org/opencadc/minoc/DeleteAction.java @@ -98,6 +98,7 @@ public DeleteAction() { */ @Override public void initAction() throws Exception { + super.initAction(); checkWritable(); initAndAuthorize(WriteGrant.class); initDAO(); diff --git a/minoc/src/main/java/org/opencadc/minoc/GetAction.java b/minoc/src/main/java/org/opencadc/minoc/GetAction.java index 9521591d9..e2df6428b 100644 --- a/minoc/src/main/java/org/opencadc/minoc/GetAction.java +++ b/minoc/src/main/java/org/opencadc/minoc/GetAction.java @@ -124,6 +124,7 @@ public class GetAction extends ArtifactAction { // constructor for unit tests with no config/init GetAction(boolean init) { super(init); + this.extractFilenameOverride = true; } /** @@ -131,6 +132,7 @@ public class GetAction extends ArtifactAction { */ public GetAction() { super(); + this.extractFilenameOverride = true; } /** @@ -138,6 +140,7 @@ public GetAction() { */ @Override public void initAction() throws Exception { + super.initAction(); checkReadable(); initAndAuthorize(ReadGrant.class); initDAO(); @@ -193,7 +196,7 @@ public void doAction() throws 
Exception { } // default: complete download - HeadAction.setHeaders(artifact, syncOutput); + HeadAction.setHeaders(artifact, filenameOverride, syncOutput); bcos = new ByteCountOutputStream(syncOutput.getOutputStream()); // create tmp StorageLocation with expected checksum so adapter can potentially @@ -237,7 +240,7 @@ public void doAction() throws Exception { private ByteCountOutputStream doByteRangeRequest(Artifact artifact, ByteRange byteRange) throws InterruptedException, IOException, ResourceNotFoundException, ReadException, WriteException, StorageEngageException, TransientException { - HeadAction.setHeaders(artifact, syncOutput); + HeadAction.setHeaders(artifact, filenameOverride, syncOutput); syncOutput.setCode(206); long lastByte = byteRange.getOffset() + byteRange.getLength() - 1; syncOutput.setHeader(CONTENT_RANGE, "bytes " + byteRange.getOffset() + "-" diff --git a/minoc/src/main/java/org/opencadc/minoc/HeadAction.java b/minoc/src/main/java/org/opencadc/minoc/HeadAction.java index f80f27564..5cec323e0 100644 --- a/minoc/src/main/java/org/opencadc/minoc/HeadAction.java +++ b/minoc/src/main/java/org/opencadc/minoc/HeadAction.java @@ -93,6 +93,7 @@ public class HeadAction extends ArtifactAction { */ public HeadAction() { super(); + this.extractFilenameOverride = true; } /** @@ -100,6 +101,7 @@ public HeadAction() { */ @Override public void initAction() throws Exception { + super.initAction(); checkReadable(); initAndAuthorize(ReadGrant.class, true); // allowReadWithWriteGrant for head after put initDAO(); @@ -130,7 +132,7 @@ public void doAction() throws Exception { artifact = getArtifact(artifactURI); } if (artifact != null) { - setHeaders(artifact, syncOutput); + setHeaders(artifact, filenameOverride, syncOutput); } } @@ -139,7 +141,7 @@ public void doAction() throws Exception { * @param artifact The artifact with metadata * @param syncOutput The target response */ - static void setHeaders(Artifact artifact, SyncOutput syncOutput) { + static void setHeaders(Artifact artifact, String filenameOverride, SyncOutput syncOutput) { syncOutput.setHeader(ARTIFACT_ID_HDR, artifact.getID().toString()); syncOutput.setDigest(artifact.getContentChecksum()); syncOutput.setLastModified(artifact.getContentLastModified()); @@ -148,8 +150,11 @@ static void setHeaders(Artifact artifact, SyncOutput syncOutput) { DateFormat df = DateUtil.getDateFormat(DateUtil.HTTP_DATE_FORMAT, DateUtil.GMT); syncOutput.setHeader("Last-Modified", df.format(artifact.getContentLastModified())); - String filename = InventoryUtil.computeArtifactFilename(artifact.getURI()); - syncOutput.setHeader("Content-Disposition", "attachment; filename=\"" + filename + "\""); + String filename = filenameOverride; + if (filename == null) { + filename = InventoryUtil.computeArtifactFilename(artifact.getURI()); + } + syncOutput.setHeader("Content-Disposition", "inline; filename=\"" + filename + "\""); if (artifact.contentEncoding != null) { syncOutput.setHeader("Content-Encoding", artifact.contentEncoding); diff --git a/minoc/src/main/java/org/opencadc/minoc/MinocConfig.java b/minoc/src/main/java/org/opencadc/minoc/MinocConfig.java new file mode 100644 index 000000000..437804b4a --- /dev/null +++ b/minoc/src/main/java/org/opencadc/minoc/MinocConfig.java @@ -0,0 +1,440 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. 
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.minoc; + +import ca.nrc.cadc.auth.AuthMethod; +import ca.nrc.cadc.net.HttpGet; +import ca.nrc.cadc.net.HttpTransfer; +import ca.nrc.cadc.reg.Standards; +import ca.nrc.cadc.reg.client.RegistryClient; +import ca.nrc.cadc.util.InvalidConfigException; +import ca.nrc.cadc.util.MultiValuedProperties; +import ca.nrc.cadc.util.PropertiesReader; +import java.io.ByteArrayOutputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import org.apache.log4j.Logger; +import org.opencadc.inventory.InventoryUtil; +import org.opencadc.inventory.Namespace; +import org.opencadc.inventory.db.SQLGenerator; +import org.opencadc.inventory.storage.StorageAdapter; + +/** + * Configuration object that can be stored in JNDI and used by actions. + * + * @author pdowler + */ +public class MinocConfig { + private static final Logger log = Logger.getLogger(MinocConfig.class); + + static final String JNDI_DATASOURCE = "jdbc/inventory"; // context.xml + + // config keys + private static final String MINOC_KEY = "org.opencadc.minoc"; + static final String RESOURCE_ID_KEY = MINOC_KEY + ".resourceID"; + static final String SQLGEN_KEY = SQLGenerator.class.getName(); + static final String SCHEMA_KEY = MINOC_KEY + ".inventory.schema"; + static final String SA_KEY = StorageAdapter.class.getName(); + static final String TRUST_KEY = MINOC_KEY + ".trust.preauth"; + static final String READ_GRANTS_KEY = MINOC_KEY + ".readGrantProvider"; + static final String WRITE_GRANTS_KEY = MINOC_KEY + ".writeGrantProvider"; + static final String READABLE_KEY = MINOC_KEY + ".readable"; + static final String WRITABLE_KEY = MINOC_KEY + ".writable"; + static final String RECOVERABLE_NS_KEY = MINOC_KEY + ".recoverableNamespace"; + static final String DEV_AUTH_ONLY_KEY = MINOC_KEY + ".authenticateOnly"; + + private final MultiValuedProperties configProperties; + + private final Map trustedServices = new TreeMap<>(); + private final List readGrantServices = new ArrayList<>(); + private final List writeGrantServices = new ArrayList<>(); + private final boolean readable; + private final boolean writable; + private final List recoverableNamespaces = new ArrayList<>(); + + final boolean authenticateOnly; + + public MinocConfig() { + PropertiesReader r = new PropertiesReader("minoc.properties"); + this.configProperties = r.getAllProperties(); + + validateConfigProps(configProperties); + + // from here on, fail on invalid config + List readGrants = configProperties.getProperty(READ_GRANTS_KEY); + if (readGrants != null) { + for (String s : readGrants) { + try { + URI u = new URI(s); + readGrantServices.add(u); + } catch (URISyntaxException ex) { + throw new IllegalStateException("invalid config: " + READ_GRANTS_KEY + "=" + s + " INVALID", ex); + } + } + } + + List writeGrants = configProperties.getProperty(WRITE_GRANTS_KEY); + if (writeGrants != null) { + for (String s : writeGrants) { + try { + URI u = new URI(s); + writeGrantServices.add(u); + } catch (URISyntaxException ex) { + throw new IllegalStateException("invalid config: " + WRITE_GRANTS_KEY + "=" + s + " INVALID", ex); + } + } + } + + String ao = configProperties.getFirstPropertyValue(DEV_AUTH_ONLY_KEY); + if (ao != null) { + try { + this.authenticateOnly = Boolean.valueOf(ao); + if (authenticateOnly) { + log.warn("(configuration) authenticateOnly = " + 
authenticateOnly); + } + } catch (Exception ex) { + throw new IllegalStateException("invalid config: " + DEV_AUTH_ONLY_KEY + "=" + ao + " must be true|false or not set"); + } + } else { + authenticateOnly = false; + } + + List trusted = configProperties.getProperty(TRUST_KEY); + if (trusted != null) { + for (String s : trusted) { + try { + URI u = new URI(s); + trustedServices.put(u, null); + } catch (URISyntaxException ex) { + throw new IllegalStateException("invalid config: " + TRUST_KEY + "=" + s + " INVALID", ex); + } + } + // try to sync keys on startup + syncKeys(); + } + + List recov = configProperties.getProperty(RECOVERABLE_NS_KEY); + if (recov != null) { + for (String s : recov) { + try { + Namespace ns = new Namespace(s); + recoverableNamespaces.add(ns); + } catch (Exception ex) { + throw new IllegalStateException("invalid config: " + RECOVERABLE_NS_KEY + "=" + s + " INVALID", ex); + } + } + } + + // optional + String sread = configProperties.getFirstPropertyValue(MinocConfig.READABLE_KEY); + if (sread != null) { + this.readable = Boolean.parseBoolean(sread); + } else { + this.readable = !readGrantServices.isEmpty() || !trustedServices.isEmpty(); + } + String swrite = configProperties.getFirstPropertyValue(MinocConfig.WRITABLE_KEY); + if (swrite != null) { + this.writable = Boolean.parseBoolean(swrite); + } else { + this.writable = !writeGrantServices.isEmpty() || !trustedServices.isEmpty(); + } + } + + private void validateConfigProps(MultiValuedProperties mvp) { + // plain validate once at startup + StringBuilder sb = new StringBuilder(); + sb.append("incomplete config: "); + boolean ok = true; + + String rid = mvp.getFirstPropertyValue(MinocConfig.RESOURCE_ID_KEY); + sb.append("\n\t" + RESOURCE_ID_KEY + ": "); + if (rid == null) { + sb.append("MISSING"); + ok = false; + } else { + sb.append("OK"); + } + + String sac = mvp.getFirstPropertyValue(SA_KEY); + sb.append("\n\t").append(SA_KEY).append(": "); + if (sac == null) { + sb.append("MISSING"); + ok = false; + } else { + sb.append("OK"); + } + + String sqlgen = mvp.getFirstPropertyValue(SQLGEN_KEY); + sb.append("\n\t").append(SQLGEN_KEY).append(": "); + if (sqlgen == null) { + sb.append("MISSING"); + ok = false; + } else { + try { + Class c = Class.forName(sqlgen); + sb.append("OK"); + } catch (ClassNotFoundException ex) { + sb.append("class not found: " + sqlgen); + ok = false; + } + } + + String schema = mvp.getFirstPropertyValue(SCHEMA_KEY); + sb.append("\n\t").append(SCHEMA_KEY).append(": "); + if (schema == null) { + sb.append("MISSING"); + ok = false; + } else { + sb.append("OK"); + } + + // optional + List trusted = mvp.getProperty(TRUST_KEY); + if (trusted != null) { + for (String s : trusted) { + sb.append("\n\t").append(TRUST_KEY + "=").append(s); + try { + URI uri = new URI(s); + sb.append(" OK"); + } catch (URISyntaxException ex) { + sb.append(" INVALID"); + ok = false; + } + } + } + + // optional + List readGrants = mvp.getProperty(READ_GRANTS_KEY); + if (readGrants != null) { + for (String s : readGrants) { + sb.append("\n\t").append(READ_GRANTS_KEY + "=").append(s); + try { + URI u = new URI(s); + sb.append(" OK"); + } catch (URISyntaxException ex) { + sb.append(" INVALID"); + ok = false; + } + } + } + + // optional + List writeGrants = mvp.getProperty(WRITE_GRANTS_KEY); + if (writeGrants != null) { + for (String s : writeGrants) { + sb.append("\n\t").append(WRITE_GRANTS_KEY + "=").append(s); + try { + URI u = new URI(s); + sb.append(" OK"); + } catch (URISyntaxException ex) { + sb.append(" INVALID"); + ok = 
false; + } + } + } + + // optional + String sread = mvp.getFirstPropertyValue(MinocConfig.READABLE_KEY); + sb.append("\n\t" + READABLE_KEY + ": "); + if (sread != null) { + if ("true".equals(sread) || "false".equals(sread)) { + sb.append(" OK"); + } else { + sb.append(" INVALID"); + ok = false; + } + } + + // optional + String swrite = mvp.getFirstPropertyValue(MinocConfig.WRITABLE_KEY); + sb.append("\n\t" + WRITABLE_KEY + ": "); + if (sread != null) { + if ("true".equals(swrite) || "false".equals(swrite)) { + sb.append(" OK"); + } else { + sb.append(" INVALID"); + ok = false; + } + } + + // optional + List rawRecNS = mvp.getProperty(RECOVERABLE_NS_KEY); + if (rawRecNS != null) { + for (String s : rawRecNS) { + sb.append("\n\t").append(RECOVERABLE_NS_KEY + "=").append(s); + try { + Namespace ns = new Namespace(s); + } catch (Exception ex) { + sb.append(" INVALID"); + } + } + } + + if (!ok) { + throw new InvalidConfigException(sb.toString()); + } + } + + public MultiValuedProperties getProperties() { + return configProperties; + } + + public Map getTrustedServices() { + // check and try to sync missing keys before request + syncKeys(); + return trustedServices; + } + + private void syncKeys() { + RegistryClient reg = new RegistryClient(); + // check map for null keys and try to retrieve them + // ASSUMPTION: keys never change once generated so if they do then minoc + // needs to be restarted + for (Map.Entry me : trustedServices.entrySet()) { + if (me.getValue() == null) { + try { + log.info("get trusted pubkey: " + me.getKey()); + URL capURL = reg.getAccessURL(RegistryClient.Query.CAPABILITIES, me.getKey()); + String s = capURL.toExternalForm().replace("/capabilities", "/pubkey"); + URL keyURL = new URL(s); + log.info("get trusted pubkey: " + me.getKey() + " -> " + keyURL); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + HttpGet get = new HttpGet(keyURL, bos); + get.setConnectionTimeout(6000); + get.setReadTimeout(6000); + get.setRetry(0, 0, HttpTransfer.RetryReason.NONE); + get.run(); + if (get.getThrowable() != null) { + throw (Exception) get.getThrowable(); + } + byte[] key = bos.toByteArray(); + trustedServices.put(me.getKey(), key); + log.info("get trusted pubkey: " + me.getKey() + " OK"); + } catch (Exception ex) { + log.warn("failed to get public key from " + me.getKey() + ": " + ex); + } + } + } + } + + public boolean isReadable() { + return readable; + } + + public boolean isWritable() { + return writable; + } + + public List getReadGrantServices() { + return readGrantServices; + } + + public List getWriteGrantServices() { + return writeGrantServices; + } + + public boolean isAuthenticateOnly() { + return authenticateOnly; + } + + public List getRecoverableNamespaces() { + return recoverableNamespaces; + } + + public StorageAdapter getStorageAdapter() { + String cname = configProperties.getFirstPropertyValue(MinocConfig.SA_KEY); + StorageAdapter storageAdapter = InventoryUtil.loadPlugin(cname); + for (Namespace ns : recoverableNamespaces) { + log.info("initStorageAdapter: recoverableNamespace = " + ns.getNamespace()); + } + storageAdapter.setRecoverableNamespaces(recoverableNamespaces); + return storageAdapter; + } + + public Map getDaoConfig() { + String cname = configProperties.getFirstPropertyValue(SQLGenerator.class.getName()); + try { + Map ret = new TreeMap<>(); + Class clz = Class.forName(cname); + ret.put(SQLGenerator.class.getName(), clz); + ret.put("jndiDataSourceName", JNDI_DATASOURCE); + ret.put("invSchema", 
configProperties.getFirstPropertyValue(SCHEMA_KEY)); + ret.put("genSchema", configProperties.getFirstPropertyValue(SCHEMA_KEY)); + //config.put("vosSchema", null); + //config.put("database", null); + return ret; + } catch (ClassNotFoundException ex) { + throw new IllegalStateException("invalid config: failed to load SQLGenerator: " + cname); + } + } +} diff --git a/minoc/src/main/java/org/opencadc/minoc/MinocInitAction.java b/minoc/src/main/java/org/opencadc/minoc/MinocInitAction.java index 37b70f3fd..b8bdd677d 100644 --- a/minoc/src/main/java/org/opencadc/minoc/MinocInitAction.java +++ b/minoc/src/main/java/org/opencadc/minoc/MinocInitAction.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2022. (c) 2022. +* (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -79,6 +79,9 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.naming.NamingException; import javax.sql.DataSource; import org.apache.log4j.Logger; import org.opencadc.inventory.InventoryUtil; @@ -86,7 +89,7 @@ import org.opencadc.inventory.StorageSite; import org.opencadc.inventory.db.SQLGenerator; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.opencadc.inventory.storage.StorageAdapter; /** @@ -96,31 +99,10 @@ public class MinocInitAction extends InitAction { private static final Logger log = Logger.getLogger(MinocInitAction.class); - static final String JNDI_DATASOURCE = "jdbc/inventory"; // context.xml - - // config keys - private static final String MINOC_KEY = "org.opencadc.minoc"; - static final String RESOURCE_ID_KEY = MINOC_KEY + ".resourceID"; - - static final String SQLGEN_KEY = SQLGenerator.class.getName(); - - static final String SCHEMA_KEY = MINOC_KEY + ".inventory.schema"; - - static final String SA_KEY = StorageAdapter.class.getName(); - - static final String PUBKEYFILE_KEY = MINOC_KEY + ".publicKeyFile"; - static final String READ_GRANTS_KEY = MINOC_KEY + ".readGrantProvider"; - static final String WRITE_GRANTS_KEY = MINOC_KEY + ".writeGrantProvider"; - - static final String RECOVERABLE_NS_KEY = MINOC_KEY + ".recoverableNamespace"; - - static final String DEV_AUTH_ONLY_KEY = MINOC_KEY + ".authenticateOnly"; - // set init initConfig, used by subsequent init methods - - MultiValuedProperties props; + private MinocConfig config; + private String jndiConfigKey; private URI resourceID; - private Map daoConfig; public MinocInitAction() { super(); @@ -133,175 +115,68 @@ public void doInit() { initStorageSite(); initStorageAdapter(); } - - /** - * Read config file and verify that all required entries are present. 
- * - * @return MultiValuedProperties containing the application config - * @throws IllegalStateException if required config items are missing - */ - static MultiValuedProperties getConfig() { - PropertiesReader r = new PropertiesReader("minoc.properties"); - MultiValuedProperties mvp = r.getAllProperties(); - - StringBuilder sb = new StringBuilder(); - sb.append("incomplete config: "); - boolean ok = true; - - String rid = mvp.getFirstPropertyValue(RESOURCE_ID_KEY); - sb.append("\n\t" + RESOURCE_ID_KEY + ": "); - if (rid == null) { - sb.append("MISSING"); - ok = false; - } else { - sb.append("OK"); - } - - String sac = mvp.getFirstPropertyValue(SA_KEY); - sb.append("\n\t").append(SA_KEY).append(": "); - if (sac == null) { - sb.append("MISSING"); - ok = false; - } else { - sb.append("OK"); - } - - String sqlgen = mvp.getFirstPropertyValue(SQLGEN_KEY); - sb.append("\n\t").append(SQLGEN_KEY).append(": "); - if (sqlgen == null) { - sb.append("MISSING"); - ok = false; - } else { - try { - Class c = Class.forName(sqlgen); - sb.append("OK"); - } catch (ClassNotFoundException ex) { - sb.append("class not found: " + sqlgen); - ok = false; - } - } - - String schema = mvp.getFirstPropertyValue(SCHEMA_KEY); - sb.append("\n\t").append(SCHEMA_KEY).append(": "); - if (schema == null) { - sb.append("MISSING"); - ok = false; - } else { - sb.append("OK"); - } - - // optional - String pubkeyFileName = mvp.getFirstPropertyValue(PUBKEYFILE_KEY); - sb.append("\n\t").append(PUBKEYFILE_KEY).append(": "); - if (pubkeyFileName != null) { - File pk = new File(System.getProperty("user.home") + "/config/" + pubkeyFileName); - if (!pk.exists()) { - sb.append(" NOT FOUND ").append(pk.getAbsolutePath()); - ok = false; - } else { - sb.append("OK"); - } - } - - // optional - List readGrants = mvp.getProperty(READ_GRANTS_KEY); - if (readGrants != null) { - for (String s : readGrants) { - sb.append("\n\t").append(READ_GRANTS_KEY + "=").append(s); - try { - URI u = new URI(s); - sb.append(" OK"); - } catch (URISyntaxException ex) { - sb.append(" INVALID"); - ok = false; - } - } - } - - // optional - List writeGrants = mvp.getProperty(WRITE_GRANTS_KEY); - if (writeGrants != null) { - for (String s : writeGrants) { - sb.append("\n\t").append(WRITE_GRANTS_KEY + "=").append(s); - try { - URI u = new URI(s); - sb.append(" OK"); - } catch (URISyntaxException ex) { - sb.append(" INVALID"); - ok = false; - } - } - } - - // optional - List rawRecNS = mvp.getProperty(RECOVERABLE_NS_KEY); - if (rawRecNS != null) { - for (String s : rawRecNS) { - sb.append("\n\t").append(RECOVERABLE_NS_KEY + "=").append(s); - try { - Namespace ns = new Namespace(s); - } catch (Exception ex) { - sb.append(" INVALID"); - } - } - } - if (!ok) { - throw new IllegalStateException(sb.toString()); + @Override + public void doShutdown() { + super.doShutdown(); + try { + Context ctx = new InitialContext(); + ctx.unbind(jndiConfigKey); + } catch (NamingException ex) { + log.debug("failed to remove config from JNDI", ex); } - - return mvp; } - static Map getDaoConfig(MultiValuedProperties props) { - String cname = props.getFirstPropertyValue(SQLGenerator.class.getName()); + // get config from JNDI + static MinocConfig getConfig(String appName) { + String key = appName + "-" + MinocConfig.class.getName(); try { - Map ret = new TreeMap<>(); - Class clz = Class.forName(cname); - ret.put(SQLGenerator.class.getName(), clz); - ret.put("jndiDataSourceName", MinocInitAction.JNDI_DATASOURCE); - ret.put("schema", props.getFirstPropertyValue(MinocInitAction.SCHEMA_KEY)); - 
//config.put("database", null); + Context ctx = new InitialContext(); + MinocConfig ret = (MinocConfig) ctx.lookup(key); return ret; - } catch (ClassNotFoundException ex) { - throw new IllegalStateException("invalid config: failed to load SQLGenerator: " + cname); + } catch (NamingException ex) { + throw new RuntimeException("BUG: failed to get config from JNDI", ex); } } - static List getRecoverableNamespaces(MultiValuedProperties props) { - List ret = new ArrayList<>(); - List rawRecNS = props.getProperty(RECOVERABLE_NS_KEY); - if (rawRecNS != null) { - for (String s : rawRecNS) { - ret.add(new Namespace(s)); - } - } - return ret; - } - private void initConfig() { log.info("initConfig: START"); - this.props = getConfig(); - String rid = props.getFirstPropertyValue(RESOURCE_ID_KEY); + this.config = new MinocConfig(); + MultiValuedProperties mvp = config.getProperties(); + String rid = mvp.getFirstPropertyValue(MinocConfig.RESOURCE_ID_KEY); + jndiConfigKey = appName + "-" + MinocConfig.class.getName(); + try { + Context ctx = new InitialContext(); + try { + ctx.unbind(jndiConfigKey); + } catch (NamingException ignore) { + log.debug("unbind previous JNDI key (" + jndiConfigKey + ") failed... ignoring"); + } + ctx.bind(jndiConfigKey, config); + + log.info("created JNDI key: " + jndiConfigKey + " object: " + config.getClass().getName()); + } catch (Exception ex) { + log.error("Failed to create JNDI Key " + jndiConfigKey, ex); + } try { this.resourceID = new URI(rid); - this.daoConfig = getDaoConfig(props); log.info("initConfig: OK"); } catch (URISyntaxException ex) { - throw new IllegalStateException("invalid config: " + RESOURCE_ID_KEY + " must be a valid URI"); + throw new IllegalStateException("invalid config: " + MinocConfig.RESOURCE_ID_KEY + " must be a valid URI"); } } private void initDatabase() { log.info("initDatabase: START"); try { - DataSource ds = DBUtil.findJNDIDataSource(JNDI_DATASOURCE); + Map daoConfig = config.getDaoConfig(); + DataSource ds = DBUtil.findJNDIDataSource(MinocConfig.JNDI_DATASOURCE); String database = (String) daoConfig.get("database"); - String schema = (String) daoConfig.get("schema"); - InitDatabase init = new InitDatabase(ds, database, schema); + String schema = (String) daoConfig.get("invSchema"); + InitDatabaseSI init = new InitDatabaseSI(ds, database, schema); init.doInit(); - log.info("initDatabase: " + JNDI_DATASOURCE + " " + schema + " OK"); + log.info("initDatabase: " + MinocConfig.JNDI_DATASOURCE + " " + schema + " OK"); } catch (Exception ex) { throw new IllegalStateException("check/init database failed", ex); } @@ -309,6 +184,7 @@ private void initDatabase() { private void initStorageSite() { log.info("initStorageSite: START"); + Map daoConfig = config.getDaoConfig(); StorageSiteDAO ssdao = new StorageSiteDAO(); ssdao.setConfig(daoConfig); @@ -320,9 +196,9 @@ private void initStorageSite() { if (name.charAt(0) == '/') { name = name.substring(1); } - - boolean allowRead = !props.getProperty(READ_GRANTS_KEY).isEmpty(); - boolean allowWrite = !props.getProperty(WRITE_GRANTS_KEY).isEmpty(); + + boolean allowRead = config.isReadable(); + boolean allowWrite = config.isWritable(); StorageSite self = null; if (curlist.isEmpty()) { @@ -334,7 +210,6 @@ private void initStorageSite() { self.setName(name); self.setAllowRead(allowRead); self.setAllowWrite(allowWrite); - } else { throw new IllegalStateException("BUG: found " + curlist.size() + " StorageSite entries; expected 0 or 1"); } @@ -345,12 +220,7 @@ private void initStorageSite() { private void 
initStorageAdapter() { log.info("initStorageAdapter: START"); - StorageAdapter storageAdapter = InventoryUtil.loadPlugin(props.getFirstPropertyValue(MinocInitAction.SA_KEY)); - List rec = MinocInitAction.getRecoverableNamespaces(props); - for (Namespace ns : rec) { - log.info("initStorageAdapter: recoverableNamespace = " + ns.getNamespace()); - } - storageAdapter.setRecoverableNamespaces(rec); - log.info("initStorageAdapter: " + storageAdapter.getClass().getName() + " OK"); + StorageAdapter sa = config.getStorageAdapter(); + log.info("initStorageAdapter: " + sa.getClass().getName() + " OK"); } } diff --git a/minoc/src/main/java/org/opencadc/minoc/PostAction.java b/minoc/src/main/java/org/opencadc/minoc/PostAction.java index b383feadf..e6a6cb7eb 100644 --- a/minoc/src/main/java/org/opencadc/minoc/PostAction.java +++ b/minoc/src/main/java/org/opencadc/minoc/PostAction.java @@ -97,6 +97,7 @@ public PostAction() { */ @Override public void initAction() throws Exception { + super.initAction(); checkWritable(); initAndAuthorize(WriteGrant.class); initDAO(); @@ -192,7 +193,7 @@ public void doAction() throws Exception { log.debug("commit txn: OK"); syncOutput.setCode(202); // Accepted - HeadAction.setHeaders(existing, syncOutput); + HeadAction.setHeaders(existing, null, syncOutput); syncOutput.setHeader("content-length", 0); } catch (Exception e) { log.error("failed to persist " + artifactURI, e); diff --git a/minoc/src/main/java/org/opencadc/minoc/PutAction.java b/minoc/src/main/java/org/opencadc/minoc/PutAction.java index 4e4aec402..44c099dc7 100644 --- a/minoc/src/main/java/org/opencadc/minoc/PutAction.java +++ b/minoc/src/main/java/org/opencadc/minoc/PutAction.java @@ -123,6 +123,7 @@ public Content accept(String name, String contentType, InputStream inputStream) @Override public void initAction() throws Exception { + super.initAction(); checkWritable(); initAndAuthorize(WriteGrant.class); initDAO(); diff --git a/minoc/src/main/java/org/opencadc/minoc/ServiceAvailability.java b/minoc/src/main/java/org/opencadc/minoc/ServiceAvailability.java index 2dad0c9e8..d5139b84a 100644 --- a/minoc/src/main/java/org/opencadc/minoc/ServiceAvailability.java +++ b/minoc/src/main/java/org/opencadc/minoc/ServiceAvailability.java @@ -142,15 +142,15 @@ public Availability getStatus() { String note = "service is accepting requests"; try { - MultiValuedProperties props = MinocInitAction.getConfig(); - Map config = MinocInitAction.getDaoConfig(props); - ArtifactDAO dao = new ArtifactDAO(); - dao.setConfig(config); // connectivity tested - String state = getState(); if (RestAction.STATE_OFFLINE.equals(state)) { return new Availability(false, RestAction.STATE_OFFLINE_MSG); } + + MinocConfig config = MinocInitAction.getConfig(appName); + ArtifactDAO dao = new ArtifactDAO(); + dao.setConfig(config.getDaoConfig()); // connectivity tested + if (RestAction.STATE_READ_ONLY.equals(state)) { return new Availability(false, RestAction.STATE_READ_ONLY_MSG); } @@ -213,6 +213,9 @@ public Availability getStatus() { } } + // TODO: check grant providers + // TODO: check for null pubkeys for trusted services + } catch (CheckException ce) { // tests determined that the resource is not working isGood = false; diff --git a/minoc/src/main/webapp/WEB-INF/web.xml b/minoc/src/main/webapp/WEB-INF/web.xml index 6616cfdca..d58ecc74c 100644 --- a/minoc/src/main/webapp/WEB-INF/web.xml +++ b/minoc/src/main/webapp/WEB-INF/web.xml @@ -25,6 +25,7 @@ ca.nrc.cadc.util ca.nrc.cadc.vosi ca.nrc.cadc.db + ca.nrc.cadc.reg diff --git 
a/minoc/src/test/java/org/opencadc/minoc/ArtifactActionTest.java b/minoc/src/test/java/org/opencadc/minoc/ArtifactActionTest.java index 8b38e6f50..cf9807c66 100644 --- a/minoc/src/test/java/org/opencadc/minoc/ArtifactActionTest.java +++ b/minoc/src/test/java/org/opencadc/minoc/ArtifactActionTest.java @@ -122,10 +122,19 @@ public void doAction() throws Exception { } private void assertCorrectPath(String path, String expURI, String expToken) { + assertCorrectPath(path, expURI, expToken, null); + } + + private void assertCorrectPath(String path, String expURI, String expToken, String expFilenameOverride) { ArtifactAction action = new TestArtifactAction(path); + if (expFilenameOverride != null) { + action.extractFilenameOverride = true; + } action.parsePath(); + log.info(path + " -> " + action.artifactURI + " - " + action.authToken + " - " + action.filenameOverride); Assert.assertEquals("artifactURI", URI.create(expURI), action.artifactURI); Assert.assertEquals("authToken", expToken, action.authToken); + Assert.assertEquals("filenameOverride", expFilenameOverride, action.filenameOverride); if (action.artifactURI == null) { Assert.fail("Failed to parse legal path: " + path); } @@ -134,9 +143,7 @@ private void assertCorrectPath(String path, String expURI, String expToken) { private void assertIllegalPath(String path) { ArtifactAction action = new TestArtifactAction(path); action.parsePath(); - if (action.artifactURI != null) { - Assert.fail("Should have failed to parse path: " + path); - } + Assert.assertNull(action.artifactURI); } @Test @@ -147,10 +154,14 @@ public void testParsePath() { assertCorrectPath("token/cadc:TEST/myartifact", "cadc:TEST/myartifact", "token"); assertCorrectPath("cadc:TEST/myartifact", "cadc:TEST/myartifact", null); assertCorrectPath("token/cadc:TEST/myartifact", "cadc:TEST/myartifact", "token"); - assertCorrectPath("mast:long/uri/with/segments/fits.fits", "mast:long/uri/with/segments/fits.fits", null); + assertCorrectPath("mast:long/uri/with/segments/something.fits", "mast:long/uri/with/segments/something.fits", null); assertCorrectPath("token/mast:long/uri/with/segments/fits.fits", "mast:long/uri/with/segments/fits.fits", "token"); assertCorrectPath("token-with-dashes/cadc:TEST/myartifact", "cadc:TEST/myartifact", "token-with-dashes"); + assertCorrectPath("cadc:vault/uuid:fo/something.fits", "cadc:vault/uuid", null, "something.fits"); + assertCorrectPath("token/cadc:vault/uuid:fo/something.fits", "cadc:vault/uuid", "token", "something.fits"); + + assertIllegalPath(null); assertIllegalPath(""); assertIllegalPath("noschemeinuri"); assertIllegalPath("token/noschemeinuri"); @@ -159,11 +170,9 @@ public void testParsePath() { assertIllegalPath("cadc:path#fragment?query"); assertIllegalPath("cadc://host/path"); assertIllegalPath("cadc://:port/path"); - assertIllegalPath("artifacts/token1/token2/cadc:FOO/bar"); - assertIllegalPath("artifacts/token/cadc:ccda:FOO/bar"); - - assertIllegalPath(null); - + assertIllegalPath("artifacts/token1/token2/cadc:FOO/bar"); // sketchy scheme + assertIllegalPath("cadc:ccda:FOO/bar"); // sketchy extra colons + assertIllegalPath("cadc:vault/uuid:/something.fits"); // extra colons in path component } catch (Exception unexpected) { log.error("unexpected exception", unexpected); Assert.fail("unexpected exception: " + unexpected); diff --git a/ratik/VERSION b/ratik/VERSION index 79fec5904..38b1547ae 100644 --- a/ratik/VERSION +++ b/ratik/VERSION @@ -4,6 +4,6 @@ # tags with and without build number so operators use the versioned # tag but we always 
keep a timestamped tag in case a semantic tag gets # replaced accidentally -VER=0.1.9 +VER=1.0.0 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" unset VER diff --git a/ratik/build.gradle b/ratik/build.gradle index feaeb51bd..79137835c 100644 --- a/ratik/build.gradle +++ b/ratik/build.gradle @@ -16,9 +16,8 @@ group = 'org.opencadc' dependencies { compile 'org.opencadc:cadc-util:[1.10.2,2.0)' - compile 'org.opencadc:cadc-inventory:[0.9.4,2.0)' - // temporarily limit this lib because cadc-inventory-db-0.15.0 is from the vos2 feature branch - compile 'org.opencadc:cadc-inventory-db:[0.14.6,0.15)' + compile 'org.opencadc:cadc-inventory:[1.0,2.0)' + compile 'org.opencadc:cadc-inventory-db:[1.0,2.0)' compile 'org.opencadc:cadc-inventory-util:[0.1.8,1.0)' compile 'org.opencadc:cadc-registry:[1.5,2.0)' compile 'org.opencadc:cadc-tap:[1.1.15,2.0)' diff --git a/ratik/src/intTest/java/org/opencadc/ratik/InventoryEnvironment.java b/ratik/src/intTest/java/org/opencadc/ratik/InventoryEnvironment.java index 8fdd021d7..95820b9e2 100644 --- a/ratik/src/intTest/java/org/opencadc/ratik/InventoryEnvironment.java +++ b/ratik/src/intTest/java/org/opencadc/ratik/InventoryEnvironment.java @@ -78,7 +78,7 @@ import org.opencadc.inventory.db.DeletedStorageLocationEventDAO; import org.opencadc.inventory.db.SQLGenerator; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.springframework.jdbc.core.JdbcTemplate; public class InventoryEnvironment { @@ -101,7 +101,8 @@ public InventoryEnvironment() throws Exception { daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); daoConfig.put("database", TestUtil.INVENTORY_DATABASE); - daoConfig.put("schema", TestUtil.INVENTORY_SCHEMA); + daoConfig.put("invSchema", TestUtil.INVENTORY_SCHEMA); + daoConfig.put("genSchema", TestUtil.INVENTORY_SCHEMA); daoConfig.put("jndiDataSourceName", jndiPath); storageSiteDAO.setConfig(daoConfig); @@ -110,9 +111,9 @@ public InventoryEnvironment() throws Exception { deletedArtifactEventDAO.setConfig(daoConfig); deletedStorageLocationEventDAO.setConfig(daoConfig); - new InitDatabase(DBUtil.findJNDIDataSource(jndiPath), + new InitDatabaseSI(DBUtil.findJNDIDataSource(jndiPath), (String) daoConfig.get("database"), - (String) daoConfig.get("schema")).doInit(); + (String) daoConfig.get("invSchema")).doInit(); daoConfig.remove("jndiDataSourceName"); } diff --git a/ratik/src/intTest/java/org/opencadc/ratik/LuskanEnvironment.java b/ratik/src/intTest/java/org/opencadc/ratik/LuskanEnvironment.java index 965391993..b15026e02 100644 --- a/ratik/src/intTest/java/org/opencadc/ratik/LuskanEnvironment.java +++ b/ratik/src/intTest/java/org/opencadc/ratik/LuskanEnvironment.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2020. (c) 2020. + * (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -71,19 +71,17 @@ import ca.nrc.cadc.db.ConnectionConfig; import ca.nrc.cadc.db.DBConfig; import ca.nrc.cadc.db.DBUtil; - import java.net.URI; import java.util.Map; import java.util.TreeMap; - import org.apache.log4j.Logger; import org.opencadc.inventory.StorageSite; +import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.DeletedArtifactEventDAO; import org.opencadc.inventory.db.DeletedStorageLocationEventDAO; -import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.SQLGenerator; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.springframework.jdbc.core.JdbcTemplate; public class LuskanEnvironment { @@ -109,7 +107,8 @@ public LuskanEnvironment() throws Exception { daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); daoConfig.put("jndiDataSourceName", jndiPath); daoConfig.put("database", TestUtil.LUSKAN_DATABASE); - daoConfig.put("schema", TestUtil.LUSKAN_SCHEMA); + daoConfig.put("invSchema", TestUtil.LUSKAN_SCHEMA); + daoConfig.put("genSchema", TestUtil.LUSKAN_SCHEMA); storageSiteDAO.setConfig(daoConfig); artifactDAO.setConfig(daoConfig); @@ -118,9 +117,9 @@ public LuskanEnvironment() throws Exception { globalArtifactDAO.setConfig(daoConfig); - new InitDatabase(DBUtil.findJNDIDataSource(jndiPath), + new InitDatabaseSI(DBUtil.findJNDIDataSource(jndiPath), (String) daoConfig.get("database"), - (String) daoConfig.get("schema")).doInit(); + (String) daoConfig.get("invSchema")).doInit(); } void cleanTestEnvironment() throws Exception { diff --git a/ratik/src/main/java/org/opencadc/ratik/InventoryValidator.java b/ratik/src/main/java/org/opencadc/ratik/InventoryValidator.java index f8dcc78c5..db4b17291 100644 --- a/ratik/src/main/java/org/opencadc/ratik/InventoryValidator.java +++ b/ratik/src/main/java/org/opencadc/ratik/InventoryValidator.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2023. (c) 2023. + * (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -79,7 +79,6 @@ import ca.nrc.cadc.reg.client.RegistryClient; import ca.nrc.cadc.util.BucketSelector; import ca.nrc.cadc.util.StringUtil; - import java.io.File; import java.io.IOException; import java.net.URI; @@ -95,18 +94,16 @@ import java.util.Map; import java.util.TreeMap; import java.util.UUID; - import javax.naming.NamingException; import javax.security.auth.Subject; import javax.sql.DataSource; - import org.apache.log4j.Logger; import org.opencadc.inventory.Artifact; import org.opencadc.inventory.InventoryUtil; import org.opencadc.inventory.StorageSite; import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.opencadc.inventory.query.ArtifactRowMapper; import org.opencadc.inventory.util.ArtifactSelector; import org.opencadc.tap.TapClient; @@ -185,9 +182,9 @@ public InventoryValidator(ConnectionConfig connectionConfig, Map try { String jndiDataSourceName = (String) daoConfig.get("jndiDataSourceName"); String database = (String) daoConfig.get("database"); - String schema = (String) daoConfig.get("schema"); + String schema = (String) daoConfig.get("invSchema"); DataSource ds = DBUtil.findJNDIDataSource(jndiDataSourceName); - InitDatabase init = new InitDatabase(ds, database, schema); + InitDatabaseSI init = new InitDatabaseSI(ds, database, schema); init.doInit(); log.info(String.format("initDatabase: %s %s", jndiDataSourceName, schema)); } catch (Exception ex) { diff --git a/ratik/src/main/java/org/opencadc/ratik/Main.java b/ratik/src/main/java/org/opencadc/ratik/Main.java index e4fb592ab..c3d2afdd8 100644 --- a/ratik/src/main/java/org/opencadc/ratik/Main.java +++ b/ratik/src/main/java/org/opencadc/ratik/Main.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2020. (c) 2020. + * (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -158,7 +158,9 @@ public static void main(final String[] args) { final String password = props.getFirstPropertyValue(DB_PASSWORD_CONFIG_KEY); final String dbUrl = props.getFirstPropertyValue(DB_URL_CONFIG_KEY); - daoConfig.put("schema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); + //daoConfig.put("database",...); + daoConfig.put("invSchema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); + daoConfig.put("genSchema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); final ConnectionConfig cc = new ConnectionConfig(null, null, username, password, "org.postgresql.Driver", dbUrl); diff --git a/raven/README.md b/raven/README.md index ecc19202b..cac828ddc 100644 --- a/raven/README.md +++ b/raven/README.md @@ -11,14 +11,23 @@ Runtime configuration must be made available via the `/config` directory. 
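The `daoConfig` changes above (ratik `Main.java` and the luskan/inventory test environments) all apply the same migration to the cadc-inventory-db 1.0 API: the single `schema` key becomes `invSchema` plus `genSchema` (set to the same value everywhere in this change set) and `InitDatabase` becomes `InitDatabaseSI`. A minimal sketch of that pattern, not part of the actual changes; the schema name is a placeholder and `jdbc/inventory` is the datasource name used elsewhere in this PR:

```
import ca.nrc.cadc.db.DBUtil;
import java.util.Map;
import java.util.TreeMap;
import javax.sql.DataSource;
import org.opencadc.inventory.db.ArtifactDAO;
import org.opencadc.inventory.db.SQLGenerator;
import org.opencadc.inventory.db.version.InitDatabaseSI;

public class DaoConfigSketch {
    static ArtifactDAO initInventoryDAO() throws Exception {
        Map<String, Object> daoConfig = new TreeMap<>();
        daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class);
        daoConfig.put("jndiDataSourceName", "jdbc/inventory");
        // pre-1.0 code used a single "schema" entry; 1.0 expects both keys
        daoConfig.put("invSchema", "inventory");
        daoConfig.put("genSchema", "inventory");

        // InitDatabase was renamed to InitDatabaseSI; the database name may be null here,
        // as in RavenInitAction.initDatabase()
        DataSource ds = DBUtil.findJNDIDataSource("jdbc/inventory");
        InitDatabaseSI init = new InitDatabaseSI(ds, null, "inventory");
        init.doInit();

        ArtifactDAO dao = new ArtifactDAO();
        dao.setConfig(daoConfig); // connectivity tested
        return dao;
    }
}
```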
When running raven.war in tomcat, parameters of the connection pool in META-INF/context.xml need to be configured in catalina.properties: ``` -# database connection pools +# query pool for user requests org.opencadc.raven.query.maxActive={max connections for query pool} org.opencadc.raven.query.username={database username for query pool} org.opencadc.raven.query.password={database password for query pool} org.opencadc.raven.query.url=jdbc:postgresql://{server}/{database} +# admin pool for setup +org.opencadc.raven.inventory.maxActive={max connections for admin pool} +org.opencadc.raven.inventory.username={database username for admin pool} +org.opencadc.raven.inventory.password={database password for admin pool} +org.opencadc.raven.inventory.url=jdbc:postgresql://{server}/{database} ``` -The _query_ pool is used to query inventory for the requested Artifact. +The _query_ account is used to query inventory for the requested Artifact; this pool can be +configured with a read-only database account. + +The _inventory_ account owns and manages (create, alter, drop) inventory database objects and +(optional) URL signing keys (see _keys.preauth_ below). ### cadc-registry.properties @@ -34,29 +43,31 @@ org.opencadc.raven.inventory.schema={schema} # consistency settings org.opencadc.raven.consistency.preventNotFound=true|false ``` -`raven` can be configured prevent artifact-not-found errors that might result due to the eventual consistency nature of -the system by directly checking for the artifact at the sites (`preventNotFound=true`). This however introduces an -overhead for the genuine not-found cases. +The _preventNotFound_ key can be used to configure `raven` to prevent artifact-not-found errors that might +result from the eventually consistent nature of the system by directly checking for the artifact at +_all known_ sites. This feature introduces an overhead for the genuine not-found cases. + + The following optional keys configure raven to use external service(s) to obtain grant information in order -to perform authorization checks: +to perform authorization checks and generate signed URLs: ``` -# keys to generate pre-auth URLs to minoc -org.opencadc.raven.publicKeyFile={public key file name} -org.opencadc.raven.privateKeyFile={private key file name} - org.opencadc.raven.readGrantProvider={resourceID of a permission granting service} org.opencadc.raven.writeGrantProvider={resourceID of a permission granting service} -``` -The optional _privateKeyFile_ is used to sign pre-auth URLs (one-time token included in URL) so that a `minoc` service does not -have to repeat permission checks. The _publicKeyFile_ is not currently used but may be required in future (either exported via URL -or used to check if `minoc` services have the right key before generating pre-auth URLs: TBD). +# url signing key usage +org.opencadc.raven.keys.preauth={true|false} +``` The optional _readGrantProvider_ and _writeGrantProvider_ keys configure `raven` to call other services to get grants (permissions) for operations. Multiple values of the permission granting service resourceID(s) may be provided by including multiple property settings. All services will be consulted but a single positive result is sufficient to grant permission for an action. +The _keys.preauth_ key (default: false) configures `raven` to use URL-signing. 
When enabled, `raven` can generate a signed token +and embed it into the URL; `minoc` services that are configured to trust a `raven` service will download the public key and can +validate the token and grant access without further permission checks. With transfer negotiation, the signed URL gets added as +an additional "anonymous" URL. + The following optional keys configure raven to prioritize sites returned in transfer negotiation, with higher priority sites first in the list of transfer URL's. Multiple values of _namespace_ may be specified for a single _resourceID_. The _namespace_ value(s) must end with a colon (:) or slash (/) so one namespace cannot accidentally match (be a @@ -97,6 +108,13 @@ org.opencadc.raven.authenticateOnly=true When _authenticateOnly_ is `true`, any authenticated user will be able to read/write/delete files and anonymous users will be able to read files. +### cadc-log.properties (optional) +See cadc-log for common +dynamic logging control. + +### cadc-vosi.properties (optional) +See cadc-vosi for common +service state control. ### cadcproxy.pem (optional) This client certificate is used to make authenticated server-to-server calls for system-level A&A purposes. diff --git a/raven/VERSION b/raven/VERSION index b73f3577f..6a39bdba3 100644 --- a/raven/VERSION +++ b/raven/VERSION @@ -2,6 +2,6 @@ # semantic version tag: major.minor[.patch] # build version tag: timestamp # tag: {semantic}-{build} -VER=0.7.10 +VER=1.0.0 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" unset VER diff --git a/raven/build.gradle b/raven/build.gradle index c5069bfb4..fec3cf4f1 100644 --- a/raven/build.gradle +++ b/raven/build.gradle @@ -26,17 +26,17 @@ war { } dependencies { - compile 'org.opencadc:cadc-util:[1.10.2,2.0)' + compile 'org.opencadc:cadc-util:[1.10.3,2.0)' compile 'org.opencadc:cadc-log:[1.1.6,2.0)' compile 'org.opencadc:cadc-registry:[1.7,)' compile 'org.opencadc:cadc-vosi:[1.4.3,2.0)' compile 'org.opencadc:cadc-rest:[1.3.14,)' compile 'org.opencadc:cadc-cdp:[1.0,)' compile 'org.opencadc:cadc-gms:[1.0.4,)' - compile 'org.opencadc:cadc-inventory:[0.9.4,2.0)' - compile 'org.opencadc:cadc-inventory-db:[0.14.5,0.15)' - compile 'org.opencadc:cadc-inventory-server:[0.2.1,)' - compile 'org.opencadc:cadc-permissions:[0.3.1,)' + compile 'org.opencadc:cadc-inventory:[1.0,2.0)' + compile 'org.opencadc:cadc-inventory-db:[1.0,2.0)' + compile 'org.opencadc:cadc-inventory-server:[0.3.0,)' + compile 'org.opencadc:cadc-permissions:[0.3.5,)' compile 'org.opencadc:cadc-permissions-client:[0.3,)' compile 'org.opencadc:cadc-vos:[1.2,2.0)' diff --git a/raven/src/intTest/README.md b/raven/src/intTest/README.md index a37bbdca0..fabf61573 100644 --- a/raven/src/intTest/README.md +++ b/raven/src/intTest/README.md @@ -46,5 +46,5 @@ org.opencadc.raven.putPreference=@SITE3 org.opencadc.raven.consistency.preventNotFound=true # external resolvers -ca.nrc.cadc.net.StorageResolver=mast ca.nrc.cadc.caom2.artifact.resolvers.MastResolver +ca.nrc.cadc.net.StorageResolver=ca.nrc.cadc.caom2.artifact.resolvers.MastResolver ``` diff --git a/raven/src/intTest/java/org/opencadc/raven/FilesTest.java b/raven/src/intTest/java/org/opencadc/raven/FilesTest.java index b0658db85..2e02e7453 100644 --- a/raven/src/intTest/java/org/opencadc/raven/FilesTest.java +++ b/raven/src/intTest/java/org/opencadc/raven/FilesTest.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2022. (c) 2022. + * (c) 2023. 
(c) 2023. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -76,10 +76,6 @@ import ca.nrc.cadc.reg.Standards; import ca.nrc.cadc.reg.client.RegistryClient; import ca.nrc.cadc.util.Log4jInit; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Protocol; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.VOS; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; @@ -104,7 +100,12 @@ import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.DeletedArtifactEventDAO; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; +import org.opencadc.inventory.transfer.ProtocolsGenerator; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; /** * Test files endpoint. @@ -132,7 +133,7 @@ public FilesTest() throws Exception { artifactDAO.setConfig(config); this.siteDAO = new StorageSiteDAO(artifactDAO); - InitDatabase init = new InitDatabase(artifactDAO.getDataSource(), DATABASE, SCHEMA); + InitDatabaseSI init = new InitDatabaseSI(artifactDAO.getDataSource(), DATABASE, SCHEMA); init.doInit(); } @@ -413,7 +414,7 @@ public void checkHeadResult(HttpGet request, URI artifactURI, long size, URI che Assert.assertEquals("HEAD response code", 200, request.getResponseCode()); Assert.assertEquals("File length", size, Long.valueOf(request.getResponseHeader("Content-Length")).longValue()); Assert.assertNotNull("File last-modified", request.getResponseHeader("Last-Modified")); - Assert.assertEquals("File name", "attachment; filename=\"" + InventoryUtil.computeArtifactFilename(artifactURI) + "\"", + Assert.assertEquals("File name", "inline; filename=\"" + InventoryUtil.computeArtifactFilename(artifactURI) + "\"", request.getResponseHeader("Content-Disposition")); Assert.assertEquals("File digest", checksum, request.getDigest()); Assert.assertEquals("File type", contentType, request.getResponseHeader("Content-Type")); diff --git a/raven/src/intTest/java/org/opencadc/raven/NegotiationTest.java b/raven/src/intTest/java/org/opencadc/raven/NegotiationTest.java index d50c70d15..c1191493d 100644 --- a/raven/src/intTest/java/org/opencadc/raven/NegotiationTest.java +++ b/raven/src/intTest/java/org/opencadc/raven/NegotiationTest.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2022. (c) 2022. + * (c) 2023. (c) 2023. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -76,10 +76,6 @@ import ca.nrc.cadc.reg.Standards; import ca.nrc.cadc.reg.client.RegistryClient; import ca.nrc.cadc.util.Log4jInit; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Protocol; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.VOS; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.net.URI; @@ -103,7 +99,14 @@ import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.DeletedArtifactEventDAO; import org.opencadc.inventory.db.StorageSiteDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; +import org.opencadc.inventory.transfer.ProtocolsGenerator; +import org.opencadc.permissions.TokenTool; +import org.opencadc.permissions.WriteGrant; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; /** * Test transfer negotiation. @@ -128,7 +131,7 @@ public NegotiationTest() throws Exception { artifactDAO.setConfig(config); this.siteDAO = new StorageSiteDAO(artifactDAO); - InitDatabase init = new InitDatabase(artifactDAO.getDataSource(), DATABASE, SCHEMA); + InitDatabaseSI init = new InitDatabaseSI(artifactDAO.getDataSource(), DATABASE, SCHEMA); init.doInit(); } @@ -749,7 +752,7 @@ public void testGetSites() throws Exception { // can deliver the file List requested = new ArrayList<>(); - Protocol files = new Protocol(Standards.SI_FILES.toASCIIString()); + Protocol files = new Protocol(Standards.SI_FILES); requested.add(files); try { @@ -834,4 +837,59 @@ public Object run() throws Exception { Assert.fail("unexpected exception: " + e); } } + + @Test + public void testPreauthURL() throws Exception { + + StorageSite site = new StorageSite(CONSIST_RESOURCE_ID, "site1", true, true); + try { + // get raven pub key + URL pubKeyURL = anonURL.toURI().resolve("./pubkey").toURL(); + log.debug("raven pub key URL: " + pubKeyURL); + HttpGet getPubKey = new HttpGet(pubKeyURL, true); + getPubKey.run(); + Assert.assertEquals(200, getPubKey.getResponseCode()); + Assert.assertNull(getPubKey.getThrowable()); + final byte[] buffer = new byte[64 * 1024]; + final InputStream inputStream = getPubKey.getInputStream(); + int bytesRead = inputStream.read(buffer); + if (bytesRead == buffer.length) { + // might be incomplete + throw new RuntimeException("BUG - pubkey input buffer is too small"); + } + byte[] pubKey = new byte[bytesRead]; + System.arraycopy(buffer, 0, pubKey, 0, bytesRead); + + TokenTool tokenTool = new TokenTool(pubKey); + siteDAO.put(site); + Subject.doAs(userSubject, new PrivilegedExceptionAction() { + public Object run() throws Exception { + URI artifactURI = URI.create("cadc:TEST/" + UUID.randomUUID() + ".fits"); + Protocol p = new Protocol(VOS.PROTOCOL_HTTPS_PUT); + p.setSecurityMethod(Standards.SECURITY_METHOD_ANON); + Transfer transfer = new Transfer(artifactURI, Direction.pushToVoSpace); + transfer.getProtocols().add(p); + transfer.version = VOS.VOSPACE_21; + Transfer negotiated = negotiate(transfer); + List endPoints = negotiated.getAllEndpoints(VOS.PROTOCOL_HTTPS_PUT.toASCIIString()); + Assert.assertEquals(1, endPoints.size()); + URI endPoint = URI.create(endPoints.get(0)); + String path = endPoint.getPath(); + int columnIndex = path.indexOf(":"); + Assert.assertTrue(columnIndex>0); + 
String tmp = path.substring(0, columnIndex); // ignore artifact URI slashes + String[] pathComp = tmp.split("/"); + String token = pathComp[pathComp.length - 2]; + URI resArtifactURI = URI.create(pathComp[pathComp.length - 1] + path.substring(columnIndex)); + log.debug("Result artifact URI: " + resArtifactURI); + Assert.assertEquals(artifactURI, resArtifactURI); + log.debug("token: " + token); + tokenTool.validateToken(token, artifactURI, WriteGrant.class); + return negotiated; + } + }); + } finally { + siteDAO.delete(site.getID()); + } + } } diff --git a/raven/src/intTest/java/org/opencadc/raven/RavenTest.java b/raven/src/intTest/java/org/opencadc/raven/RavenTest.java index 587ca7370..58d4bb27f 100644 --- a/raven/src/intTest/java/org/opencadc/raven/RavenTest.java +++ b/raven/src/intTest/java/org/opencadc/raven/RavenTest.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2019. (c) 2019. + * (c) 2023. (c) 2023. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -81,10 +81,6 @@ import ca.nrc.cadc.reg.client.RegistryClient; import ca.nrc.cadc.util.FileUtil; import ca.nrc.cadc.util.Log4jInit; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.TransferParsingException; -import ca.nrc.cadc.vos.TransferReader; -import ca.nrc.cadc.vos.TransferWriter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; @@ -96,8 +92,11 @@ import javax.security.auth.Subject; import org.apache.log4j.Level; import org.apache.log4j.Logger; -import org.junit.Assert; import org.opencadc.inventory.db.SQLGenerator; +import org.opencadc.vospace.transfer.Transfer; +import org.opencadc.vospace.transfer.TransferParsingException; +import org.opencadc.vospace.transfer.TransferReader; +import org.opencadc.vospace.transfer.TransferWriter; /** * Abstract integration test class with general setup and test support. @@ -144,7 +143,8 @@ public RavenTest() throws Exception { config = new TreeMap(); config.put(SQLGenerator.class.getName(), SQLGenerator.class); config.put("jndiDataSourceName", "jdbc/inventory"); - config.put("schema", SCHEMA); + config.put("invSchema", SCHEMA); + config.put("genSchema", SCHEMA); } catch (Exception ex) { log.error("setup failed", ex); diff --git a/raven/src/main/java/org/opencadc/raven/ArtifactAction.java b/raven/src/main/java/org/opencadc/raven/ArtifactAction.java index 6e6483b7b..b49032c64 100644 --- a/raven/src/main/java/org/opencadc/raven/ArtifactAction.java +++ b/raven/src/main/java/org/opencadc/raven/ArtifactAction.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2022. (c) 2022. +* (c) 2023. (c) 2023. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -73,12 +73,11 @@ import ca.nrc.cadc.rest.RestAction; import ca.nrc.cadc.rest.Version; import ca.nrc.cadc.util.MultiValuedProperties; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Transfer; +import ca.nrc.cadc.util.RsaSignatureGenerator; import ca.nrc.cadc.vosi.Availability; -import java.io.File; import java.net.URI; import java.net.URISyntaxException; +import java.security.KeyPair; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -87,10 +86,17 @@ import javax.naming.InitialContext; import javax.naming.NamingException; import org.apache.log4j.Logger; +import org.opencadc.inventory.PreauthKeyPair; import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.db.PreauthKeyPairDAO; +import org.opencadc.inventory.transfer.StorageSiteAvailabilityCheck; +import org.opencadc.inventory.transfer.StorageSiteRule; import org.opencadc.permissions.ReadGrant; +import org.opencadc.permissions.TokenTool; import org.opencadc.permissions.WriteGrant; import org.opencadc.permissions.client.PermissionsCheck; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Transfer; /** * Abstract class for all that raven action classes have in common, @@ -109,8 +115,7 @@ public abstract class ArtifactAction extends RestAction { // immutable state set in constructor protected final ArtifactDAO artifactDAO; - protected final File publicKeyFile; - protected final File privateKeyFile; + protected TokenTool tokenGen; protected final List readGrantServices = new ArrayList<>(); protected final List writeGrantServices = new ArrayList<>(); protected StorageResolver storageResolver; @@ -120,15 +125,15 @@ public abstract class ArtifactAction extends RestAction { protected Map siteRules; protected final boolean preventNotFound; + protected final boolean preauthKeys; // constructor for unit tests with no config/init ArtifactAction(boolean init) { super(); this.authenticateOnly = false; - this.publicKeyFile = null; - this.privateKeyFile = null; this.artifactDAO = null; this.preventNotFound = false; + this.preauthKeys = false; this.storageResolver = null; } @@ -172,33 +177,13 @@ protected ArtifactAction() { authenticateOnly = false; } - initResolver(); - - // technically, raven only needs the private key to generate pre-auth tokens - // but both are specified here for clarity - // - in principle, raven could export it's public key and minoc(s) could retrieve it - // - for now, minoc(s) need to be configured with the public key to validate pre-auth - - String pubkeyFileName = props.getFirstPropertyValue(RavenInitAction.PUBKEYFILE_KEY); - String privkeyFileName = props.getFirstPropertyValue(RavenInitAction.PRIVKEYFILE_KEY); - if (pubkeyFileName == null && privkeyFileName == null) { - log.debug("public/private key preauth not enabled by config"); - this.publicKeyFile = null; - this.privateKeyFile = null; - } else { - this.publicKeyFile = new File(System.getProperty("user.home") + "/config/" + pubkeyFileName); - this.privateKeyFile = new File(System.getProperty("user.home") + "/config/" + privkeyFileName); - if (!publicKeyFile.exists() || !privateKeyFile.exists()) { - throw new IllegalStateException("invalid config: missing public/private key pair files -- " + publicKeyFile + " | " + privateKeyFile); - } - } - - Map config = RavenInitAction.getDaoConfig(props); + Map config = 
RavenInitAction.getDaoConfig(props, RavenInitAction.JNDI_QUERY_DATASOURCE); this.artifactDAO = new ArtifactDAO(); artifactDAO.setConfig(config); // connectivity tested // get the storage site rules this.siteRules = RavenInitAction.getStorageSiteRules(props); + String pnf = props.getFirstPropertyValue(RavenInitAction.PREVENT_NOT_FOUND_KEY); if (pnf != null) { this.preventNotFound = Boolean.valueOf(pnf); @@ -206,9 +191,36 @@ protected ArtifactAction() { } else { throw new IllegalStateException("invalid config: missing preventNotFound configuration"); } + + String pak = props.getFirstPropertyValue(RavenInitAction.PREAUTH_KEY); + if (pak != null) { + this.preauthKeys = Boolean.valueOf(pak); + log.debug("Using preauth keys: " + this.preauthKeys); + } else { + this.preauthKeys = false; + } } - protected void initResolver() { + @Override + public void initAction() throws Exception { + super.initAction(); + initResolver(); + + if (preauthKeys) { + String jndiPreauthKeys = appName + "-" + PreauthKeyPair.class.getName(); + Context ctx = new InitialContext(); + try { + log.debug("lookup: " + jndiPreauthKeys); + PreauthKeyPair keys = (PreauthKeyPair) ctx.lookup(jndiPreauthKeys); + log.debug("found: " + keys); + this.tokenGen = new TokenTool(keys.getPublicKey(), keys.getPrivateKey()); + } catch (NamingException ex) { + throw new RuntimeException("BUG: failed to find keys via JNDI", ex); + } + } + } + + void initResolver() { MultiValuedProperties props = RavenInitAction.getConfig(); String resolverName = props.getFirstPropertyValue(RavenInitAction.RESOLVER_ENTRY); if (resolverName != null) { @@ -265,7 +277,7 @@ void init() throws Exception { throw new IllegalArgumentException("Missing artifact URI from path or request content"); } - String siteAvailabilitiesKey = this.appName + RavenInitAction.JNDI_AVAILABILITY_NAME; + String siteAvailabilitiesKey = this.appName + "-" + StorageSiteAvailabilityCheck.class.getName(); log.debug("siteAvailabilitiesKey: " + siteAvailabilitiesKey); try { Context initContext = new InitialContext(); @@ -283,7 +295,6 @@ void init() throws Exception { protected String getServerImpl() { // no null version checking because fail to build correctly can't get past basic testing Version v = getVersionFromResource(); - String ret = "storage-inventory/raven-" + v.getMajorMinor(); - return ret; + return "storage-inventory/raven-" + v.getMajorMinor(); } } diff --git a/raven/src/main/java/org/opencadc/raven/GetFilesAction.java b/raven/src/main/java/org/opencadc/raven/GetFilesAction.java index 7ecaf48e2..37df53fd1 100644 --- a/raven/src/main/java/org/opencadc/raven/GetFilesAction.java +++ b/raven/src/main/java/org/opencadc/raven/GetFilesAction.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2021. (c) 2021. +* (c) 2023. (c) 2023. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -69,10 +69,6 @@ import ca.nrc.cadc.net.ResourceNotFoundException; import ca.nrc.cadc.reg.Standards; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Protocol; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.VOS; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URI; @@ -80,6 +76,11 @@ import java.util.Iterator; import java.util.List; import org.apache.log4j.Logger; +import org.opencadc.inventory.transfer.ProtocolsGenerator; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; /** * Class to execute a "files" GET action. @@ -139,16 +140,18 @@ URI getFirstURL() throws ResourceNotFoundException, IOException { proto.setSecurityMethod(Standards.SECURITY_METHOD_ANON); transfer.getProtocols().add(proto); - ProtocolsGenerator pg = new ProtocolsGenerator(this.artifactDAO, this.publicKeyFile, this.privateKeyFile, - this.user, this.siteAvailabilities, this.siteRules, - this.preventNotFound, this.storageResolver); + ProtocolsGenerator pg = new ProtocolsGenerator(this.artifactDAO, this.siteAvailabilities, this.siteRules); + pg.tokenGen = this.tokenGen; + pg.user = this.user; + pg.preventNotFound = this.preventNotFound; + pg.storageResolver = this.storageResolver; List protos = pg.getProtocols(transfer); if (protos.isEmpty()) { throw new ResourceNotFoundException("not available: " + artifactURI); } URI ret = URI.create(protos.get(0).getEndpoint()); - if (pg.storageResolverAdded && protos.size() == 1) { + if (pg.getStorageResolverAdded() && protos.size() == 1) { logInfo.setMessage("external redirect: " + ret.toASCIIString()); } // for now return the first URL in the list diff --git a/raven/src/main/java/org/opencadc/raven/HeadFilesAction.java b/raven/src/main/java/org/opencadc/raven/HeadFilesAction.java index d0a671855..69cadf54d 100644 --- a/raven/src/main/java/org/opencadc/raven/HeadFilesAction.java +++ b/raven/src/main/java/org/opencadc/raven/HeadFilesAction.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2021. (c) 2021. +* (c) 2023. (c) 2023. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -70,18 +70,20 @@ import ca.nrc.cadc.net.ResourceNotFoundException; import ca.nrc.cadc.reg.Standards; import ca.nrc.cadc.rest.SyncOutput; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Protocol; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.VOS; import java.net.HttpURLConnection; import java.net.URL; +import java.util.Set; import org.apache.log4j.Logger; import org.opencadc.inventory.Artifact; import org.opencadc.inventory.InventoryUtil; +import org.opencadc.inventory.StorageSite; import org.opencadc.inventory.db.StorageSiteDAO; +import org.opencadc.inventory.transfer.ProtocolsGenerator; import org.opencadc.permissions.ReadGrant; -import org.opencadc.permissions.TokenTool; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; /** * Interface with inventory to get the metadata of an artifact. 
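The `doAction` changes below replace direct key-file handling with the shared `TokenTool`: when _keys.preauth_ is enabled, `raven` signs a one-time token with its stored key pair, and a `minoc` that trusts that `raven` validates the token with the public key it downloaded from the `/pubkey` endpoint. A minimal sketch of the two sides, using only the `TokenTool` calls that appear in this change set; the key bytes, user and artifact URI are placeholders:

```
import java.net.URI;
import org.opencadc.permissions.ReadGrant;
import org.opencadc.permissions.TokenTool;

public class PreauthTokenSketch {

    // raven side: generate the token that gets embedded in the negotiated minoc URL
    static String sign(byte[] publicKey, byte[] privateKey, URI artifactURI, String user) {
        TokenTool gen = new TokenTool(publicKey, privateKey);
        return gen.generateToken(artifactURI, ReadGrant.class, user);
    }

    // minoc side: validate the token from the request path using the public key
    // previously fetched from the trusted raven service
    static void validate(byte[] ravenPublicKey, String token, URI artifactURI) {
        TokenTool val = new TokenTool(ravenPublicKey);
        val.validateToken(token, artifactURI, ReadGrant.class);
    }
}
```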
@@ -108,19 +110,27 @@ public void doAction() throws Exception { log.debug("Starting HEAD action for " + artifactURI.toASCIIString()); Artifact artifact = artifactDAO.get(artifactURI); - if (artifact == null) { - if (this.preventNotFound) { + if (artifact == null && preventNotFound) { + StorageSiteDAO storageSiteDAO = new StorageSiteDAO(artifactDAO); + Set sites = storageSiteDAO.list(); + if (!sites.isEmpty()) { // check known storage sites - ProtocolsGenerator pg = new ProtocolsGenerator(this.artifactDAO, this.publicKeyFile, this.privateKeyFile, - this.user, this.siteAvailabilities, this.siteRules, this.preventNotFound, this.storageResolver); - StorageSiteDAO storageSiteDAO = new StorageSiteDAO(artifactDAO); + ProtocolsGenerator pg = new ProtocolsGenerator( + this.artifactDAO, this.siteAvailabilities, this.siteRules); + pg.tokenGen = this.tokenGen; + pg.user = this.user; + pg.preventNotFound = this.preventNotFound; + pg.storageResolver = this.storageResolver; + Transfer transfer = new Transfer(artifactURI, Direction.pullFromVoSpace); Protocol proto = new Protocol(VOS.PROTOCOL_HTTPS_GET); proto.setSecurityMethod(Standards.SECURITY_METHOD_ANON); transfer.getProtocols().add(proto); - TokenTool tk = new TokenTool(publicKeyFile, privateKeyFile); - String authToken = tk.generateToken(artifactURI, ReadGrant.class, user); - artifact = pg.getUnsyncedArtifact(artifactURI, transfer, storageSiteDAO.list(), authToken); + String authToken = null; + if (tokenGen != null) { + authToken = tokenGen.generateToken(artifactURI, ReadGrant.class, user); + } + artifact = pg.getUnsyncedArtifact(artifactURI, transfer, sites, authToken); } } @@ -158,7 +168,7 @@ public static void setHeaders(Artifact artifact, SyncOutput syncOutput) { syncOutput.setLastModified(artifact.getContentLastModified()); syncOutput.setHeader("Content-Length", artifact.getContentLength()); String filename = InventoryUtil.computeArtifactFilename(artifact.getURI()); - syncOutput.setHeader("Content-Disposition", "attachment; filename=\"" + filename + "\""); + syncOutput.setHeader("Content-Disposition", "inline; filename=\"" + filename + "\""); if (artifact.contentEncoding != null) { syncOutput.setHeader("Content-Encoding", artifact.contentEncoding); } diff --git a/raven/src/main/java/org/opencadc/raven/PostAction.java b/raven/src/main/java/org/opencadc/raven/PostAction.java index 66cec4129..152191b67 100644 --- a/raven/src/main/java/org/opencadc/raven/PostAction.java +++ b/raven/src/main/java/org/opencadc/raven/PostAction.java @@ -70,11 +70,6 @@ import ca.nrc.cadc.net.ResourceNotFoundException; import ca.nrc.cadc.rest.InlineContentException; import ca.nrc.cadc.rest.InlineContentHandler; -import ca.nrc.cadc.vos.Direction; -import ca.nrc.cadc.vos.Transfer; -import ca.nrc.cadc.vos.TransferReader; -import ca.nrc.cadc.vos.TransferWriter; -import ca.nrc.cadc.vos.VOS; import java.io.IOException; import java.io.InputStream; import java.net.URI; @@ -82,6 +77,12 @@ import java.util.List; import org.apache.log4j.Logger; import org.opencadc.inventory.InventoryUtil; +import org.opencadc.inventory.transfer.ProtocolsGenerator; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Transfer; +import org.opencadc.vospace.transfer.TransferReader; +import org.opencadc.vospace.transfer.TransferWriter; /** * Given a transfer request object return a transfer response object with all @@ -159,9 +160,11 @@ public InlineContentHandler.Content accept(String name, String contentType, Inpu public void 
doAction() throws Exception { initAndAuthorize(); - ProtocolsGenerator pg = new ProtocolsGenerator(this.artifactDAO, this.publicKeyFile, this.privateKeyFile, - this.user, this.siteAvailabilities, this.siteRules, - this.preventNotFound, this.storageResolver); + ProtocolsGenerator pg = new ProtocolsGenerator(this.artifactDAO, this.siteAvailabilities, this.siteRules); + pg.tokenGen = this.tokenGen; + pg.user = this.user; + pg.preventNotFound = this.preventNotFound; + pg.storageResolver = this.storageResolver; Transfer ret = new Transfer(artifactURI, transfer.getDirection()); // TODO: change from pg.getProtocols(transfer) to pg.getResolvedTransfer(transfer)?? ret.getProtocols().addAll(pg.getProtocols(transfer)); diff --git a/raven/src/main/java/org/opencadc/raven/RavenInitAction.java b/raven/src/main/java/org/opencadc/raven/RavenInitAction.java index b4e09db31..53bffab91 100644 --- a/raven/src/main/java/org/opencadc/raven/RavenInitAction.java +++ b/raven/src/main/java/org/opencadc/raven/RavenInitAction.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2021. (c) 2021. +* (c) 2023. (c) 2023. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -67,33 +67,35 @@ package org.opencadc.raven; +import ca.nrc.cadc.db.DBUtil; import ca.nrc.cadc.rest.InitAction; import ca.nrc.cadc.util.MultiValuedProperties; import ca.nrc.cadc.util.PropertiesReader; +import ca.nrc.cadc.util.RsaSignatureGenerator; import ca.nrc.cadc.util.StringUtil; -import ca.nrc.cadc.vosi.Availability; -import ca.nrc.cadc.vosi.AvailabilityClient; - -import java.io.File; import java.net.URI; import java.net.URISyntaxException; +import java.security.KeyPair; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.TreeMap; - import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; - +import javax.sql.DataSource; import org.apache.log4j.Logger; import org.opencadc.inventory.Namespace; -import org.opencadc.inventory.StorageSite; +import org.opencadc.inventory.PreauthKeyPair; import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.db.PreauthKeyPairDAO; import org.opencadc.inventory.db.SQLGenerator; import org.opencadc.inventory.db.StorageSiteDAO; +import org.opencadc.inventory.db.version.InitDatabaseSI; +import org.opencadc.inventory.transfer.StorageSiteAvailabilityCheck; +import org.opencadc.inventory.transfer.StorageSiteRule; +import org.springframework.dao.DataIntegrityViolationException; /** * @@ -102,30 +104,28 @@ public class RavenInitAction extends InitAction { private static final Logger log = Logger.getLogger(RavenInitAction.class); + static String KEY_PAIR_NAME = "raven-preauth-keys"; + // config keys private static final String RAVEN_KEY = "org.opencadc.raven"; - private static final String RAVEN_CONSIST_KEY = "org.opencadc.raven.consistency"; - static final String JNDI_DATASOURCE = "jdbc/inventory"; // context.xml - static final String JNDI_AVAILABILITY_NAME = ".availabilities"; + static final String JNDI_QUERY_DATASOURCE = "jdbc/query"; // context.xml + static final String JNDI_ADMIN_DATASOURCE = "jdbc/inventory"; // context.xml static final String SCHEMA_KEY = RAVEN_KEY + ".inventory.schema"; - - static final String PUBKEYFILE_KEY = RAVEN_KEY + 
".publicKeyFile"; - static final String PRIVKEYFILE_KEY = RAVEN_KEY + ".privateKeyFile"; + static final String PREVENT_NOT_FOUND_KEY = RAVEN_KEY + ".consistency.preventNotFound"; + static final String PREAUTH_KEY = RAVEN_KEY + ".keys.preauth"; + static final String READ_GRANTS_KEY = RAVEN_KEY + ".readGrantProvider"; static final String WRITE_GRANTS_KEY = RAVEN_KEY + ".writeGrantProvider"; - static final String RESOLVER_ENTRY = "ca.nrc.cadc.net.StorageResolver"; - static final String PREVENT_NOT_FOUND_KEY = RAVEN_CONSIST_KEY + ".preventNotFound"; - + static final String DEV_AUTH_ONLY_KEY = RAVEN_KEY + ".authenticateOnly"; - static final int AVAILABILITY_CHECK_TIMEOUT = 30; //secs - static final int AVAILABILITY_FULL_CHECK_TIMEOUT = 300; //secs - // set init initConfig, used by subsequent init methods MultiValuedProperties props; + + private String jndiPreauthKeys; private String siteAvailabilitiesKey; private Thread availabilityCheck; @@ -136,16 +136,24 @@ public RavenInitAction() { @Override public void doInit() { initConfig(); - initDAO(); + initDatabase(); + initKeyPair(); + initQueryDAO(); initGrantProviders(); - initKeys(); initStorageSiteRules(); initAvailabilityCheck(); } @Override public void doShutdown() { - terminate(); + try { + Context ctx = new InitialContext(); + ctx.unbind(jndiPreauthKeys); + } catch (Exception oops) { + log.error("unbind failed during destroy", oops); + } + + terminateAvailabilityCheck(); } void initConfig() { @@ -154,9 +162,71 @@ void initConfig() { log.info("initConfig: OK"); } - void initDAO() { + private void initDatabase() { + log.info("initDatabase: START"); + try { + Map daoConfig = getDaoConfig(props, JNDI_ADMIN_DATASOURCE); + String jndiDataSourceName = (String) daoConfig.get("jndiDataSourceName"); + String database = (String) daoConfig.get("database"); + String schema = (String) daoConfig.get("invSchema"); + DataSource ds = DBUtil.findJNDIDataSource(jndiDataSourceName); + InitDatabaseSI init = new InitDatabaseSI(ds, database, schema); + init.doInit(); + log.info("initDatabase: " + jndiDataSourceName + " " + schema + " OK"); + } catch (Exception ex) { + throw new IllegalStateException("check/init database failed", ex); + } + } + + private void initKeyPair() { + String enablePreauthKeys = props.getFirstPropertyValue(PREAUTH_KEY); + if (enablePreauthKeys == null || !"true".equals(enablePreauthKeys)) { + log.info("initKeyPair: " + PREAUTH_KEY + " == " + enablePreauthKeys + " - SKIP"); + return; + } + + log.info("initKeyPair: START"); + jndiPreauthKeys = appName + "-" + PreauthKeyPair.class.getName(); + try { + Map daoConfig = getDaoConfig(props, JNDI_ADMIN_DATASOURCE); + PreauthKeyPairDAO dao = new PreauthKeyPairDAO(); + dao.setConfig(daoConfig); + PreauthKeyPair keys = dao.get(KEY_PAIR_NAME); + if (keys == null) { + KeyPair kp = RsaSignatureGenerator.getKeyPair(4096); + keys = new PreauthKeyPair(KEY_PAIR_NAME, kp.getPublic().getEncoded(), kp.getPrivate().getEncoded()); + try { + dao.put(keys); + log.info("initKeyPair: new keys created - OK"); + + } catch (DataIntegrityViolationException oops) { + log.warn("persist new " + PreauthKeyPair.class.getSimpleName() + " failed (" + oops + ") -- probably race condition"); + keys = dao.get(KEY_PAIR_NAME); + if (keys != null) { + log.info("race condition confirmed: another instance created keys - OK"); + } else { + throw new RuntimeException("check/init " + KEY_PAIR_NAME + " failed", oops); + } + } + } else { + log.info("initKeyPair: re-use existing keys - OK"); + } + Context ctx = new InitialContext(); + try { 
+ ctx.unbind(jndiPreauthKeys); + } catch (NamingException ignore) { + log.debug("unbind previous JNDI key (" + jndiPreauthKeys + ") failed... ignoring"); + } + ctx.bind(jndiPreauthKeys, keys); + log.info("initKeyPair: created JNDI key: " + jndiPreauthKeys); + } catch (Exception ex) { + throw new RuntimeException("check/init " + KEY_PAIR_NAME + " failed", ex); + } + } + + void initQueryDAO() { log.info("initDAO: START"); - Map dc = getDaoConfig(props); + Map dc = getDaoConfig(props, JNDI_QUERY_DATASOURCE); ArtifactDAO artifactDAO = new ArtifactDAO(); artifactDAO.setConfig(dc); // connectivity tested log.info("initDAO: OK"); @@ -189,22 +259,6 @@ void initGrantProviders() { } log.info("initGrantProviders: OK"); } - - void initKeys() { - log.info("initKeys: START"); - String pubkeyFileName = props.getFirstPropertyValue(RavenInitAction.PUBKEYFILE_KEY); - String privkeyFileName = props.getFirstPropertyValue(RavenInitAction.PRIVKEYFILE_KEY); - if (pubkeyFileName == null && privkeyFileName == null) { - log.info("initKeys: disabled OK"); - return; - } - File publicKeyFile = new File(System.getProperty("user.home") + "/config/" + pubkeyFileName); - File privateKeyFile = new File(System.getProperty("user.home") + "/config/" + privkeyFileName); - if (!publicKeyFile.exists() || !privateKeyFile.exists()) { - throw new IllegalStateException("invalid config: missing public/private key pair files -- " + publicKeyFile + " | " + privateKeyFile); - } - log.info("initKeys: OK"); - } void initStorageSiteRules() { log.info("initStorageSiteRules: START"); @@ -214,16 +268,16 @@ void initStorageSiteRules() { void initAvailabilityCheck() { StorageSiteDAO storageSiteDAO = new StorageSiteDAO(); - storageSiteDAO.setConfig(getDaoConfig(props)); + storageSiteDAO.setConfig(getDaoConfig(props, JNDI_QUERY_DATASOURCE)); - this.siteAvailabilitiesKey = this.appName + RavenInitAction.JNDI_AVAILABILITY_NAME; - terminate(); - this.availabilityCheck = new Thread(new AvailabilityCheck(storageSiteDAO, this.siteAvailabilitiesKey)); + this.siteAvailabilitiesKey = appName + "-" + StorageSiteAvailabilityCheck.class.getName(); + terminateAvailabilityCheck(); + this.availabilityCheck = new Thread(new StorageSiteAvailabilityCheck(storageSiteDAO, siteAvailabilitiesKey)); this.availabilityCheck.setDaemon(true); this.availabilityCheck.start(); } - private final void terminate() { + private final void terminateAvailabilityCheck() { if (this.availabilityCheck != null) { try { log.info("terminating AvailabilityCheck Thread..."); @@ -275,24 +329,6 @@ static MultiValuedProperties getConfig() { } else { sb.append("OK"); } - - // optional - String pub = mvp.getFirstPropertyValue(RavenInitAction.PUBKEYFILE_KEY); - sb.append("\n\t").append(RavenInitAction.PUBKEYFILE_KEY).append(": "); - if (pub == null) { - sb.append("MISSING"); - } else { - sb.append("OK"); - } - - String priv = mvp.getFirstPropertyValue(RavenInitAction.PRIVKEYFILE_KEY); - sb.append("\n\t").append(RavenInitAction.PRIVKEYFILE_KEY).append(": "); - if (priv == null) { - sb.append("MISSING"); - } else { - sb.append("OK"); - } - if (!ok) { throw new IllegalStateException(sb.toString()); @@ -301,14 +337,15 @@ static MultiValuedProperties getConfig() { return mvp; } - static Map getDaoConfig(MultiValuedProperties props) { + static Map getDaoConfig(MultiValuedProperties props, String pool) { String cname = props.getFirstPropertyValue(SQLGenerator.class.getName()); try { Map ret = new TreeMap<>(); Class clz = Class.forName(cname); ret.put(SQLGenerator.class.getName(), clz); - 
ret.put("jndiDataSourceName", RavenInitAction.JNDI_DATASOURCE); - ret.put("schema", props.getFirstPropertyValue(RavenInitAction.SCHEMA_KEY)); + ret.put("jndiDataSourceName", pool); + ret.put("invSchema", props.getFirstPropertyValue(RavenInitAction.SCHEMA_KEY)); + ret.put("genSchema", props.getFirstPropertyValue(RavenInitAction.SCHEMA_KEY)); //config.put("database", null); return ret; } catch (ClassNotFoundException ex) { @@ -372,111 +409,4 @@ static Map getStorageSiteRules(MultiValuedProperties props } return prefs; } - - private static class AvailabilityCheck implements Runnable { - private final StorageSiteDAO storageSiteDAO; - private final Map siteStates; - private final Map siteAvailabilities; - - public AvailabilityCheck(StorageSiteDAO storageSiteDAO, String siteAvailabilitiesKey) { - this.storageSiteDAO = storageSiteDAO; - this.siteStates = new HashMap(); - this.siteAvailabilities = new HashMap(); - - try { - Context initialContext = new InitialContext(); - // check if key already bound, if so unbind - try { - initialContext.unbind(siteAvailabilitiesKey); - } catch (NamingException ignore) { - // ignore - } - initialContext.bind(siteAvailabilitiesKey, this.siteAvailabilities); - } catch (NamingException e) { - throw new IllegalStateException(String.format("unable to bind %s to initial context: %s", - siteAvailabilitiesKey, e.getMessage()), e); - } - } - - @Override - public void run() { - int lastSiteQuerySecs = 0; - while (true) { - Set sites = storageSiteDAO.list(); - if (lastSiteQuerySecs >= AVAILABILITY_FULL_CHECK_TIMEOUT) { - sites = storageSiteDAO.list(); - lastSiteQuerySecs = 0; - } else { - lastSiteQuerySecs += AVAILABILITY_CHECK_TIMEOUT; - } - - for (StorageSite site: sites) { - URI resourceID = site.getResourceID(); - log.debug("checking site: " + resourceID); - SiteState siteState = this.siteStates.get(resourceID); - if (siteState == null) { - siteState = new SiteState(false, 0); - } - boolean minDetail = siteState.isMinDetail(); - Availability availability; - try { - availability = getAvailability(resourceID, minDetail); - } catch (Exception e) { - availability = new Availability(false, e.getMessage()); - log.debug(String.format("availability check failed %s - %s", resourceID, e.getMessage())); - } - final boolean prev = siteState.available; - siteState.available = availability.isAvailable(); - this.siteStates.put(resourceID, siteState); - this.siteAvailabilities.put(resourceID, availability); - String message = String.format("availability check %s %s - %s", minDetail ? "MIN" : "FULL", - resourceID, siteState.available ? 
"UP" : "DOWN"); - if (!siteState.available) { - log.warn(message); - } else if (prev != siteState.available) { - log.info(message); - } else { - log.debug(message); - } - } - - try { - log.debug(String.format("sleep availability checks for %d secs", AVAILABILITY_CHECK_TIMEOUT)); - Thread.sleep(AVAILABILITY_CHECK_TIMEOUT * 1000); - } catch (InterruptedException e) { - throw new IllegalStateException("AvailabilityCheck thread interrupted during sleep"); - } - } - } - - private Availability getAvailability(URI resourceID, boolean minDetail) { - AvailabilityClient client = new AvailabilityClient(resourceID, minDetail); - return client.getAvailability(); - } - - private class SiteState { - - public boolean available; - public int lastFullCheckSecs; - - public SiteState(boolean available, int lastFullCheckSecs) { - this.available = available; - this.lastFullCheckSecs = lastFullCheckSecs; - } - - public boolean isMinDetail() { - log.debug(String.format("isMinDetail() available=%b, lastFullCheckSecs=%d", - available, lastFullCheckSecs)); - if (this.available && this.lastFullCheckSecs < AVAILABILITY_FULL_CHECK_TIMEOUT) { - this.lastFullCheckSecs += AVAILABILITY_CHECK_TIMEOUT; - return true; - } - this.lastFullCheckSecs = 0; - return false; - } - - } - - } - } diff --git a/raven/src/main/webapp/META-INF/context.xml b/raven/src/main/webapp/META-INF/context.xml index 6e60e23c9..ad1a38896 100644 --- a/raven/src/main/webapp/META-INF/context.xml +++ b/raven/src/main/webapp/META-INF/context.xml @@ -3,7 +3,8 @@ WEB-INF/web.xml - + + + + diff --git a/raven/src/main/webapp/WEB-INF/web.xml b/raven/src/main/webapp/WEB-INF/web.xml index 6b40172f3..c76bf745d 100644 --- a/raven/src/main/webapp/WEB-INF/web.xml +++ b/raven/src/main/webapp/WEB-INF/web.xml @@ -47,6 +47,20 @@ 2 + + PubKeyServlet + ca.nrc.cadc.rest.RestServlet + + augmentSubject + false + + + get + org.opencadc.inventory.transfer.GetKeyAction + + 3 + + FilesServlet ca.nrc.cadc.rest.RestServlet @@ -106,6 +120,11 @@ FilesServlet /files/* + + + PubKeyServlet + /pubkey + diff --git a/raven/src/test/java/org/opencadc/raven/RavenInitActionTest.java b/raven/src/test/java/org/opencadc/raven/RavenInitActionTest.java index 5a12e69d1..7305eb8be 100644 --- a/raven/src/test/java/org/opencadc/raven/RavenInitActionTest.java +++ b/raven/src/test/java/org/opencadc/raven/RavenInitActionTest.java @@ -124,8 +124,6 @@ public void doInit() { String message = e.getMessage(); log.debug(message); Assert.assertTrue(message.contains(String.format("%s: MISSING", RavenInitAction.SCHEMA_KEY))); - Assert.assertTrue(message.contains(String.format("%s: MISSING", RavenInitAction.PUBKEYFILE_KEY))); - Assert.assertTrue(message.contains(String.format("%s: MISSING", RavenInitAction.PRIVKEYFILE_KEY))); } finally { System.setProperty("user.home", USER_HOME); } diff --git a/raven/src/test/java/org/opencadc/raven/StorageResolverTest.java b/raven/src/test/java/org/opencadc/raven/StorageResolverTest.java index 44745f092..a89b880f2 100644 --- a/raven/src/test/java/org/opencadc/raven/StorageResolverTest.java +++ b/raven/src/test/java/org/opencadc/raven/StorageResolverTest.java @@ -86,9 +86,8 @@ public class StorageResolverTest { private static final String DEFAULT_RAVEN_CONFIG = "org.opencadc.raven.inventory.schema=inventory\n" - + "org.opencadc.raven.publicKeyFile=raven-pub.key\n" - + "org.opencadc.raven.privateKeyFile=raven-priv.key\n" - + "org.opencadc.raven.consistency.preventNotFound=true"; + + "org.opencadc.raven.consistency.preventNotFound=true\n" + + 
"org.opencadc.raven.keys.preauth=false\n"; static { Log4jInit.setLevel("org.opencadc.raven", Level.DEBUG); diff --git a/raven/src/test/resources/testInvalidStorageSiteRules/raven.properties b/raven/src/test/resources/testInvalidStorageSiteRules/raven.properties index 15e735dcb..925216004 100644 --- a/raven/src/test/resources/testInvalidStorageSiteRules/raven.properties +++ b/raven/src/test/resources/testInvalidStorageSiteRules/raven.properties @@ -1,10 +1,8 @@ # valid config org.opencadc.raven.inventory.schema=inventory - -org.opencadc.raven.publicKeyFile=raven-pub.key -org.opencadc.raven.privateKeyFile=raven-priv.key -org.opencadc.raven.consistency.preventNotFound=true +org.opencadc.raven.consistency.preventNotFound=false +org.opencadc.raven.keys.preauth=true # invalid storage site rules diff --git a/raven/src/test/resources/testValidConfig/raven.properties b/raven/src/test/resources/testValidConfig/raven.properties index d349689d8..c01ddaa79 100644 --- a/raven/src/test/resources/testValidConfig/raven.properties +++ b/raven/src/test/resources/testValidConfig/raven.properties @@ -1,11 +1,8 @@ # valid config org.opencadc.inventory.db.SQLGenerator=org.opencadc.inventory.db.SQLGenerator org.opencadc.raven.inventory.schema=inventory - -org.opencadc.raven.publicKeyFile=raven-pub.key -org.opencadc.raven.privateKeyFile=raven-priv.key - org.opencadc.raven.consistency.preventNotFound=true +org.opencadc.raven.keys.preauth=false org.opencadc.raven.readGrantProvider=ivo://cadc.nrc.ca/baldur org.opencadc.raven.writeGrantProvider=ivo://cadc.nrc.ca/baldur diff --git a/ringhold/README.md b/ringhold/README.md index 9f23c1b8f..6c919fde2 100644 --- a/ringhold/README.md +++ b/ringhold/README.md @@ -1,7 +1,8 @@ -# Storage Inventory local artifact removal process (ringhold) +# Storage Inventory local artifact deletion process (ringhold) -Process to remove local artifacts that are no longer being synchronised by fenwick. This tool is used -to perform quick cleanup at a storage site after changing the fenwick artifact-filter policy. +Process to remove the local copy of artifacts from a storage site inventory database and +generate DeletedStorageLocationEvent(s) so the removal will propagate correctly to a global inventory. +This does not remove the files from storage (see `tantar`). ## configuration See the [cadc-java](https://github.com/opencadc/docker-base/tree/master/cadc-java) image docs for general config requirements. @@ -18,17 +19,25 @@ org.opencadc.ringhold.inventory.schema={schema for inventory database objects} org.opencadc.ringhold.inventory.username={username for inventory admin} org.opencadc.ringhold.inventory.password={password for inventory admin} org.opencadc.ringhold.inventory.url=jdbc:postgresql://{server}/{database} + +# artifact namespace(s) to remove +org.opencadc.ringhold.namespace={storage site namespace} + +# artifact uri bucket filter (optional) +org.opencadc.ringhold.buckets={uriBucket prefix or range of prefixes} ``` The `inventory` account owns and manages all the content (insert, update, delete) in the inventory schema. Unlike other components that modify inventory content, this component **does not initialise** the database objects because it never makes sense to run this in a new/empty database. The database is specified in the JDBC URL. Failure to connect to a pre-initialised database will show up in logs. -### artifact-deselector.sql -Contains a SQL clause used as a WHERE constraint. The clause returns Artifact's that match the URI pattern. 
-``` -WHERE uri LIKE 'cadc:CFHT/%' -``` +The `namespace` is the prefix of the Artifact URI's to be deleted. The `namespace` must end with a colon (:) +or slash (/) so one namespace cannot accidentally match (be a prefix of) another namespace. Multiple values +of `namespace` may be specified, one per line. + +The `buckets` value indicates a subset of artifacts to delete. The range of uri bucket prefixes is specified +with two values separated by a single - (dash) character; whitespace is ignored. Multiple instances of `ringhold` +can be run (in parallel) to subdivide the work as long as the range of buckets do not overlap. ## building it ``` diff --git a/ringhold/VERSION b/ringhold/VERSION index 51807fa89..3b882d6eb 100644 --- a/ringhold/VERSION +++ b/ringhold/VERSION @@ -1,4 +1,6 @@ ## deployable containers have a semantic and build tag # semantic version tag: major.minor[.patch] # build version tag: timestamp -TAGS="0.2-$(date --utc +"%Y%m%dT%H%M%S")" +VER=0.3.0 +TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" +unset VER diff --git a/ringhold/build.gradle b/ringhold/build.gradle index e205a6e76..685a569da 100644 --- a/ringhold/build.gradle +++ b/ringhold/build.gradle @@ -16,8 +16,8 @@ group = 'org.opencadc' dependencies { compile 'org.opencadc:cadc-util:[1.6,2.0)' - compile 'org.opencadc:cadc-inventory:[0.9,2.0)' - compile 'org.opencadc:cadc-inventory-db:[0.14,1.0)' + compile 'org.opencadc:cadc-inventory:[1.0,2.0)' + compile 'org.opencadc:cadc-inventory-db:[1.0,2.0)' testCompile 'junit:junit:[4.12,5.0)' } diff --git a/ringhold/src/intTest/java/org/opencadc/ringhold/InventoryValidatorTest.java b/ringhold/src/intTest/java/org/opencadc/ringhold/InventoryValidatorTest.java index 37fffaaa4..47e963b1d 100644 --- a/ringhold/src/intTest/java/org/opencadc/ringhold/InventoryValidatorTest.java +++ b/ringhold/src/intTest/java/org/opencadc/ringhold/InventoryValidatorTest.java @@ -72,6 +72,8 @@ import ca.nrc.cadc.db.ConnectionConfig; import ca.nrc.cadc.db.DBConfig; import ca.nrc.cadc.db.DBUtil; +import ca.nrc.cadc.db.version.InitDatabase; +import ca.nrc.cadc.util.BucketSelector; import ca.nrc.cadc.util.FileUtil; import ca.nrc.cadc.util.HexUtil; import ca.nrc.cadc.util.Log4jInit; @@ -83,31 +85,28 @@ import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Date; -import java.util.Map; -import java.util.MissingResourceException; -import java.util.Properties; -import java.util.TreeMap; -import java.util.UUID; +import java.util.*; import javax.sql.DataSource; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.opencadc.inventory.Artifact; import org.opencadc.inventory.DeletedStorageLocationEvent; +import org.opencadc.inventory.Namespace; import org.opencadc.inventory.StorageLocation; import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.DeletedStorageLocationEventDAO; import org.opencadc.inventory.db.SQLGenerator; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.springframework.jdbc.core.JdbcTemplate; /** * Various versions of: * Insert artifacts more than uri pattern - * Run tool with one uri deselector + * Run tool with different Namespaces specified * Confirm delete storage location event creation and absence of artifacts in inventory */ public class InventoryValidatorTest { @@ -118,7 +117,7 @@ public class InventoryValidatorTest { 
Log4jInit.setLevel("org.opencadc.inventory", Level.INFO); Log4jInit.setLevel("org.opencadc.inventory.db", Level.INFO); Log4jInit.setLevel("ca.nrc.cadc.db", Level.INFO); - Log4jInit.setLevel("org.opencadc.ringhold", Level.DEBUG); + Log4jInit.setLevel("org.opencadc.ringhold", Level.INFO); } static String INVENTORY_SERVER = "RINGHOLD_TEST"; @@ -168,7 +167,7 @@ public InventoryValidatorTest() throws Exception { try { DataSource dataSource = DBUtil.findJNDIDataSource(jndiPath); - InitDatabase init = new InitDatabase(dataSource, INVENTORY_DATABASE, INVENTORY_SCHEMA); + InitDatabase init = new InitDatabaseSI(dataSource, INVENTORY_DATABASE, INVENTORY_SCHEMA); init.doInit(); log.debug("initDatabase: " + jndiPath + " " + INVENTORY_SCHEMA + " OK"); } catch (Exception ex) { @@ -178,7 +177,8 @@ public InventoryValidatorTest() throws Exception { daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); daoConfig.put("jndiDataSourceName", jndiPath); daoConfig.put("database", INVENTORY_DATABASE); - daoConfig.put("schema", INVENTORY_SCHEMA); + daoConfig.put("invSchema", INVENTORY_SCHEMA); + daoConfig.put("genSchema", INVENTORY_SCHEMA); artifactDAO.setConfig(daoConfig); deletedStorageLocationEventDAO.setConfig(daoConfig); @@ -186,80 +186,12 @@ public InventoryValidatorTest() throws Exception { @Before public void setup() throws Exception { - writeConfig(); truncateTables(); } @Test - public void missingConfigTest() throws Exception { - final Path includePath = new File(TMP_DIR + "/config").toPath(); - Files.createDirectories(includePath); - final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); - boolean deleted = includeFile.delete(); - Assert.assertTrue("include file not deleted", deleted); - - configTest(); - } - - @Test - public void emptyConfigTest() throws Exception { - final Path includePath = new File(TMP_DIR + "/config").toPath(); - Files.createDirectories(includePath); - final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); - - final FileWriter fileWriter = new FileWriter(includeFile); - fileWriter.write(""); - fileWriter.flush(); - fileWriter.close(); - - configTest(); - } - - @Test - public void onlyCommentsConfigTest() throws Exception { - final Path includePath = new File(TMP_DIR + "/config").toPath(); - Files.createDirectories(includePath); - final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); - - final FileWriter fileWriter = new FileWriter(includeFile); - fileWriter.write("# WHERE uri LIKE 'cadc:INTTEST/%'"); - fileWriter.flush(); - fileWriter.close(); - - configTest(); - } - - @Test - public void doesNotStartWithWhereConfigTest() throws Exception { - final Path includePath = new File(TMP_DIR + "/config").toPath(); - Files.createDirectories(includePath); - final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); - - final FileWriter fileWriter = new FileWriter(includeFile); - fileWriter.write("uri LIKE 'cadc:INTTEST/%'\r\n"); - fileWriter.flush(); - fileWriter.close(); - - configTest(); - } - - @Test - public void multipleWhereConfigTest() throws Exception { - final Path includePath = new File(TMP_DIR + "/config").toPath(); - Files.createDirectories(includePath); - final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); - - final FileWriter fileWriter = new FileWriter(includeFile); - fileWriter.write("WHERE uri LIKE 'cadc:INTTEST/%'\r\n"); - fileWriter.write("WHERE uri LIKE 'cadc:TEST/%'"); - fileWriter.flush(); - fileWriter.close(); - - 
configTest(); - } - - public void configTest() { - StorageLocation storageLocation = new StorageLocation(URI.create("ivo://cadc.nrc.ca/foo")); + public void noArtifactsMatchNamespace() throws Exception { + StorageLocation storageLocation = new StorageLocation(URI.create("cadc:foo")); Artifact a1 = getTestArtifact("cadc:TEST/one.txt"); a1.storageLocation = storageLocation; @@ -271,48 +203,11 @@ public void configTest() { a3.storageLocation = storageLocation; this.artifactDAO.put(a3); - try { - System.setProperty("user.home", TMP_DIR); - InventoryValidator testSubject = new InventoryValidator(this.daoConfig, this.daoConfig); - testSubject.run(); - Assert.fail("should throw an exception for invalid config"); - } catch (Exception expected) { - // exception expected - } finally { - System.setProperty("user.home", USER_HOME); - } - - a1 = this.artifactDAO.get(a1.getID()); - Assert.assertNotNull(a1); - a2 = this.artifactDAO.get(a2.getID()); - Assert.assertNotNull(a2); - a3 = this.artifactDAO.get(a3.getID()); - Assert.assertNotNull(a3); - - DeletedStorageLocationEvent dsle1 = this.deletedStorageLocationEventDAO.get(a1.getID()); - Assert.assertNull(dsle1); - DeletedStorageLocationEvent dsle2 = this.deletedStorageLocationEventDAO.get(a2.getID()); - Assert.assertNull(dsle2); - DeletedStorageLocationEvent dsle3 = this.deletedStorageLocationEventDAO.get(a3.getID()); - } - - @Test - public void noArtifactsMatchFilter() throws Exception { - StorageLocation storageLocation = new StorageLocation(URI.create("ivo://cadc.nrc.ca/foo")); - - Artifact a1 = getTestArtifact("cadc:TEST/one.txt"); - a1.storageLocation = storageLocation; - this.artifactDAO.put(a1); - Artifact a2 = getTestArtifact("cadc:INT/two.txt"); - a2.storageLocation = storageLocation; - this.artifactDAO.put(a2); - Artifact a3 = getTestArtifact("cadc:CADC/three.txt"); - a3.storageLocation = storageLocation; - this.artifactDAO.put(a3); + List namespaces = Collections.singletonList(new Namespace("cadc:NOMATCH/")); try { System.setProperty("user.home", TMP_DIR); - InventoryValidator testSubject = new InventoryValidator(this.daoConfig, this.daoConfig); + InventoryValidator testSubject = new InventoryValidator(daoConfig, daoConfig, namespaces, null); testSubject.run(); } finally { System.setProperty("user.home", USER_HOME); @@ -334,9 +229,9 @@ public void noArtifactsMatchFilter() throws Exception { } @Test - public void someArtifactsMatchFilter() throws Exception { - StorageLocation a_storageLocation = new StorageLocation(URI.create("ivo://cadc.nrc.ca/foo")); - StorageLocation b_storageLocation = new StorageLocation(URI.create("ivo://cadc.nrc.ca/bar")); + public void someArtifactsMatchNamespace() throws Exception { + StorageLocation a_storageLocation = new StorageLocation(URI.create("cadc:foo")); + StorageLocation b_storageLocation = new StorageLocation(URI.create("cadc:bar")); Artifact b1 = getTestArtifact("cadc:INT/one.txt"); b1.storageLocation = b_storageLocation; @@ -357,9 +252,12 @@ public void someArtifactsMatchFilter() throws Exception { b3.storageLocation = b_storageLocation; this.artifactDAO.put(b3); + List namespaces = Collections.singletonList(new Namespace("cadc:INTTEST/")); + BucketSelector buckets = new BucketSelector("0-f"); + try { System.setProperty("user.home", TMP_DIR); - InventoryValidator testSubject = new InventoryValidator(this.daoConfig, this.daoConfig); + InventoryValidator testSubject = new InventoryValidator(daoConfig, daoConfig, namespaces, buckets); testSubject.run(); } finally { System.setProperty("user.home", USER_HOME); 
@@ -395,51 +293,67 @@ public void someArtifactsMatchFilter() throws Exception { } @Test - public void allArtifactsMatchFilter() throws Exception { - StorageLocation storageLocation = new StorageLocation(URI.create("ivo://cadc.nrc.ca/foo")); + public void allArtifactsMatchNamespace() throws Exception { + StorageLocation a_storageLocation = new StorageLocation(URI.create("cadc:foo")); + StorageLocation b_storageLocation = new StorageLocation(URI.create("cadc:bar")); - Artifact a1 = getTestArtifact("cadc:INTTEST/one.txt"); - a1.storageLocation = storageLocation; + Artifact b1 = getTestArtifact("cadc:INT/one.txt"); + b1.storageLocation = b_storageLocation; + this.artifactDAO.put(b1); + Artifact b2 = getTestArtifact("cadc:INT_TEST/two.txt"); + b2.storageLocation = b_storageLocation; + this.artifactDAO.put(b2); + Artifact a1 = getTestArtifact("cadc:INTTEST/three.txt"); + a1.storageLocation = a_storageLocation; this.artifactDAO.put(a1); - Artifact a2 = getTestArtifact("cadc:INTTEST/two.txt"); - a2.storageLocation = storageLocation; + Artifact a2 = getTestArtifact("cadc:INTTEST/four.txt"); + a2.storageLocation = a_storageLocation; this.artifactDAO.put(a2); - Artifact a3 = getTestArtifact("cadc:INTTEST/three.txt"); - a3.storageLocation = storageLocation; + Artifact a3 = getTestArtifact("cadc:INTTEST/five.txt"); + a3.storageLocation = a_storageLocation; this.artifactDAO.put(a3); + Artifact b3 = getTestArtifact("cadc:TEST/six.txt"); + b3.storageLocation = b_storageLocation; + this.artifactDAO.put(b3); + + List namespaces = Arrays.asList(new Namespace("cadc:INT/"), + new Namespace("cadc:INT_TEST/"), new Namespace("cadc:INTTEST/"), + new Namespace("cadc:TEST/")); + BucketSelector buckets = new BucketSelector("0-f"); try { System.setProperty("user.home", TMP_DIR); - InventoryValidator testSubject = new InventoryValidator(this.daoConfig, this.daoConfig); + InventoryValidator testSubject = new InventoryValidator(daoConfig, daoConfig, namespaces, buckets); testSubject.run(); } finally { System.setProperty("user.home", USER_HOME); } + DeletedStorageLocationEvent b_dsle1 = this.deletedStorageLocationEventDAO.get(b1.getID()); + Assert.assertNotNull(b_dsle1); + DeletedStorageLocationEvent b_dsle2 = this.deletedStorageLocationEventDAO.get(b2.getID()); + Assert.assertNotNull(b_dsle2); DeletedStorageLocationEvent a_dsle1 = this.deletedStorageLocationEventDAO.get(a1.getID()); Assert.assertNotNull(a_dsle1); DeletedStorageLocationEvent a_dsle2 = this.deletedStorageLocationEventDAO.get(a2.getID()); Assert.assertNotNull(a_dsle2); DeletedStorageLocationEvent a_dsle3 = this.deletedStorageLocationEventDAO.get(a3.getID()); Assert.assertNotNull(a_dsle3); + DeletedStorageLocationEvent b_dsle3 = this.deletedStorageLocationEventDAO.get(b3.getID()); + Assert.assertNotNull(b_dsle3); + b1 = this.artifactDAO.get(b1.getID()); + Assert.assertNull(b1); + b2 = this.artifactDAO.get(b2.getID()); + Assert.assertNull(b2); a1 = this.artifactDAO.get(a1.getID()); Assert.assertNull(a1); a2 = this.artifactDAO.get(a2.getID()); Assert.assertNull(a2); a3 = this.artifactDAO.get(a3.getID()); Assert.assertNull(a3); - } - - private void writeConfig() throws IOException { - final Path includePath = new File(TMP_DIR + "/config").toPath(); - Files.createDirectories(includePath); - final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); - - final FileWriter fileWriter = new FileWriter(includeFile); - fileWriter.write("WHERE uri LIKE 'cadc:INTTEST/%'"); - fileWriter.flush(); - fileWriter.close(); + b3 = 
this.artifactDAO.get(b3.getID()); + Assert.assertNull(b3); } private Artifact getTestArtifact(final String uri) { @@ -448,7 +362,6 @@ private Artifact getTestArtifact(final String uri) { return new Artifact(URI.create(uri), checkSum, new Date(), 512L); } - private void truncateTables() throws Exception { final JdbcTemplate jdbcTemplate = new JdbcTemplate(DBUtil.findJNDIDataSource(jndiPath)); jdbcTemplate.execute("TRUNCATE TABLE " + INVENTORY_SCHEMA + ".deletedArtifactEvent"); @@ -459,4 +372,134 @@ private void truncateTables() throws Exception { jdbcTemplate.execute("TRUNCATE TABLE " + INVENTORY_SCHEMA + ".Artifact"); } + + // below are tests for the ArtifactDeselector, which is not currently used, + // but preserved in case one day it is again. + + @Ignore + @Test + public void missingConfigTest() throws Exception { + final Path includePath = new File(TMP_DIR + "/config").toPath(); + Files.createDirectories(includePath); + final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); + boolean deleted = includeFile.delete(); + Assert.assertTrue("include file not deleted", deleted); + + configTest(); + } + + @Ignore + @Test + public void emptyConfigTest() throws Exception { + final Path includePath = new File(TMP_DIR + "/config").toPath(); + Files.createDirectories(includePath); + final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); + + final FileWriter fileWriter = new FileWriter(includeFile); + fileWriter.write(""); + fileWriter.flush(); + fileWriter.close(); + + configTest(); + } + + @Ignore + @Test + public void onlyCommentsConfigTest() throws Exception { + final Path includePath = new File(TMP_DIR + "/config").toPath(); + Files.createDirectories(includePath); + final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); + + final FileWriter fileWriter = new FileWriter(includeFile); + fileWriter.write("# WHERE uri LIKE 'cadc:INTTEST/%'"); + fileWriter.flush(); + fileWriter.close(); + + configTest(); + } + + @Ignore + @Test + public void doesNotStartWithWhereConfigTest() throws Exception { + final Path includePath = new File(TMP_DIR + "/config").toPath(); + Files.createDirectories(includePath); + final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); + + final FileWriter fileWriter = new FileWriter(includeFile); + fileWriter.write("uri LIKE 'cadc:INTTEST/%'\r\n"); + fileWriter.flush(); + fileWriter.close(); + + configTest(); + } + + @Ignore + @Test + public void multipleWhereConfigTest() throws Exception { + final Path includePath = new File(TMP_DIR + "/config").toPath(); + Files.createDirectories(includePath); + final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); + + final FileWriter fileWriter = new FileWriter(includeFile); + fileWriter.write("WHERE uri LIKE 'cadc:INTTEST/%'\r\n"); + fileWriter.write("WHERE uri LIKE 'cadc:TEST/%'"); + fileWriter.flush(); + fileWriter.close(); + + configTest(); + } + + @Ignore + @Test + public void configTest() { + StorageLocation storageLocation = new StorageLocation(URI.create("ivo://cadc.nrc.ca/foo")); + + Artifact a1 = getTestArtifact("cadc:TEST/one.txt"); + a1.storageLocation = storageLocation; + this.artifactDAO.put(a1); + Artifact a2 = getTestArtifact("cadc:INT/two.txt"); + a2.storageLocation = storageLocation; + this.artifactDAO.put(a2); + Artifact a3 = getTestArtifact("cadc:CADC/three.txt"); + a3.storageLocation = storageLocation; + this.artifactDAO.put(a3); + + try { + System.setProperty("user.home", 
TMP_DIR); + List namespaces = Collections.singletonList(new Namespace("cadc:FOO/")); + BucketSelector buckets = new BucketSelector("0-f"); + InventoryValidator testSubject = new InventoryValidator(this.daoConfig, this.daoConfig, namespaces, buckets); + testSubject.run(); + Assert.fail("should throw an exception for invalid config"); + } catch (Exception expected) { + // exception expected + } finally { + System.setProperty("user.home", USER_HOME); + } + + a1 = this.artifactDAO.get(a1.getID()); + Assert.assertNotNull(a1); + a2 = this.artifactDAO.get(a2.getID()); + Assert.assertNotNull(a2); + a3 = this.artifactDAO.get(a3.getID()); + Assert.assertNotNull(a3); + + DeletedStorageLocationEvent dsle1 = this.deletedStorageLocationEventDAO.get(a1.getID()); + Assert.assertNull(dsle1); + DeletedStorageLocationEvent dsle2 = this.deletedStorageLocationEventDAO.get(a2.getID()); + Assert.assertNull(dsle2); + DeletedStorageLocationEvent dsle3 = this.deletedStorageLocationEventDAO.get(a3.getID()); + } + + private void writeConfig() throws IOException { + final Path includePath = new File(TMP_DIR + "/config").toPath(); + Files.createDirectories(includePath); + final File includeFile = new File(includePath.toFile(), "artifact-deselector.sql"); + + final FileWriter fileWriter = new FileWriter(includeFile); + fileWriter.write("WHERE uri LIKE 'cadc:INTTEST/%'"); + fileWriter.flush(); + fileWriter.close(); + } + } diff --git a/ringhold/src/main/java/org/opencadc/ringhold/ArtifactDeselector.java b/ringhold/src/main/java/org/opencadc/ringhold/ArtifactDeselector.java index 893331669..2570e29b3 100644 --- a/ringhold/src/main/java/org/opencadc/ringhold/ArtifactDeselector.java +++ b/ringhold/src/main/java/org/opencadc/ringhold/ArtifactDeselector.java @@ -81,6 +81,9 @@ import org.apache.log4j.Logger; /** + *

Class is no longer used; switched to using the configured namespace and bucketUri to query + * for artifacts to remove. Left in this package in case a use case is found in the future. +

* Implementation of ArtifactSelector that includes artifacts via selective queries. * This class requires one or more fragments of SQL (a WHERE clause), each in a separate * file located in {user.home}/config/include and named {something}.sql -- see the diff --git a/ringhold/src/main/java/org/opencadc/ringhold/InventoryValidator.java b/ringhold/src/main/java/org/opencadc/ringhold/InventoryValidator.java index 5956e368d..cda660098 100644 --- a/ringhold/src/main/java/org/opencadc/ringhold/InventoryValidator.java +++ b/ringhold/src/main/java/org/opencadc/ringhold/InventoryValidator.java @@ -69,12 +69,17 @@ import ca.nrc.cadc.db.TransactionManager; import ca.nrc.cadc.io.ResourceIterator; -import ca.nrc.cadc.net.ResourceNotFoundException; +import ca.nrc.cadc.net.TransientException; +import ca.nrc.cadc.util.BucketSelector; import java.io.IOException; +import java.util.Iterator; +import java.util.List; import java.util.Map; import org.apache.log4j.Logger; import org.opencadc.inventory.Artifact; import org.opencadc.inventory.DeletedStorageLocationEvent; +import org.opencadc.inventory.InventoryUtil; +import org.opencadc.inventory.Namespace; import org.opencadc.inventory.db.ArtifactDAO; import org.opencadc.inventory.db.DeletedStorageLocationEventDAO; @@ -89,28 +94,27 @@ public class InventoryValidator implements Runnable { private final ArtifactDAO artifactIteratorDAO; private final ArtifactDAO artifactDAO; - private final String deselector; + private final List namespaces; + private final BucketSelector bucketSelector; - public InventoryValidator(Map txnConfig, Map iterConfig) { + public InventoryValidator(Map txnConfig, Map iterConfig, + List namespaces, BucketSelector bucketSelector) { + InventoryUtil.assertNotNull(InventoryValidator.class, "txnConfig", txnConfig); + InventoryUtil.assertNotNull(InventoryValidator.class, "iterConfig", iterConfig); + InventoryUtil.assertNotNull(InventoryValidator.class, "namespaces", namespaces); + this.artifactDAO = new ArtifactDAO(); artifactDAO.setConfig(txnConfig); this.artifactIteratorDAO = new ArtifactDAO(); artifactIteratorDAO.setConfig(iterConfig); - ArtifactDeselector artifactDeselector = new ArtifactDeselector(); - try { - this.deselector = artifactDeselector.getConstraint(); - } catch (ResourceNotFoundException ex) { - throw new IllegalArgumentException("missing required configuration: " - + ArtifactDeselector.SQL_FILTER_FILE_NAME, ex); - } catch (IOException ex) { - throw new IllegalArgumentException("unable to read config: " + ArtifactDeselector.SQL_FILTER_FILE_NAME, ex); - } + this.namespaces = namespaces; + this.bucketSelector = bucketSelector; } /** - * Find an artifact with a uri pattern in the deselector, + * Find an artifact for the given namespace(s) and optional bucketUri, * delete the artifact and generate a deleted storage location event. 
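 * <p>
 * Namespaces are processed one at a time; when a bucket selector is configured, each uriBucket is
 * iterated separately with limited retries on transient failures. Each matching artifact is removed
 * in its own transaction: lock the artifact, store a DeletedStorageLocationEvent, delete the
 * artifact, commit.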
*/ @Override @@ -119,31 +123,68 @@ public void run() { final DeletedStorageLocationEventDAO deletedStorageLocationEventDAO = new DeletedStorageLocationEventDAO(this.artifactDAO); - try (final ResourceIterator artifactIterator = - this.artifactIteratorDAO.iterator(this.deselector, null, false)) { + for (Namespace namespace : namespaces) { + if (bucketSelector == null) { + iterateBucket(transactionManager, deletedStorageLocationEventDAO, namespace,null); + } else { + Iterator bucketIter = bucketSelector.getBucketIterator(); + while (bucketIter.hasNext()) { + String bucket = bucketIter.next(); + log.info(InventoryValidator.class.getSimpleName() + ".START bucket=" + bucket); + int retries = 0; + boolean done = false; + while (!done && retries < 3) { + try { + iterateBucket(transactionManager, deletedStorageLocationEventDAO, namespace, bucket); + log.info(InventoryValidator.class.getSimpleName() + ".END bucket=" + bucket); + done = true; + } catch (TransientException ex) { + log.error(InventoryValidator.class.getSimpleName() + ".FAIL bucket=" + bucket, ex); + retries++; + } catch (IllegalArgumentException ex) { + log.error(InventoryValidator.class.getSimpleName() + ".FAIL bucket=" + bucket, ex); + throw ex; + } catch (RuntimeException ex) { + // TODO: probably not a great idea to retry on these... + log.error(InventoryValidator.class.getSimpleName() + ".FAIL bucket=" + bucket, ex); + retries++; + } catch (Exception ex) { + log.error(InventoryValidator.class.getSimpleName() + ".FAIL bucket=" + bucket, ex); + throw ex; + } + } + } + } + } + } + + private void iterateBucket(TransactionManager transactionManager, + DeletedStorageLocationEventDAO deletedStorageLocationEventDAO, + Namespace namespace, String bucket) { + try (final ResourceIterator artifactIterator = this.artifactIteratorDAO.iterator(namespace, bucket, false)) { while (artifactIterator.hasNext()) { - Artifact deselectorArtifact = artifactIterator.next(); - log.debug("START: Process Artifact " + deselectorArtifact.getID() + " " + deselectorArtifact.getURI()); + Artifact artifact = artifactIterator.next(); + log.debug("START: Process Artifact " + artifact.getID() + " " + artifact.getURI()); try { transactionManager.startTransaction(); - Artifact cur = this.artifactDAO.lock(deselectorArtifact); + Artifact cur = this.artifactDAO.lock(artifact); if (cur != null) { DeletedStorageLocationEvent deletedStorageLocationEvent = new DeletedStorageLocationEvent(cur.getID()); deletedStorageLocationEventDAO.put(deletedStorageLocationEvent); - + this.artifactDAO.delete(cur.getID()); - + transactionManager.commitTransaction(); log.info("DELETE: Artifact " + cur.getID() + " " + cur.getURI()); } else { transactionManager.rollbackTransaction(); log.debug("Artifact not found"); } - - log.debug("END: Process Artifact " + deselectorArtifact.getID() + " " - + deselectorArtifact.getURI()); + + log.debug("END: Process Artifact " + artifact.getID() + " " + + artifact.getURI()); } catch (Exception exception) { if (transactionManager.isOpen()) { log.error("Exception in transaction. 
Rolling back..."); @@ -164,4 +205,5 @@ public void run() { log.error("Error closing iterator: " + e.getMessage()); } } + } diff --git a/ringhold/src/main/java/org/opencadc/ringhold/Main.java b/ringhold/src/main/java/org/opencadc/ringhold/Main.java index e09a3410c..f4d64954d 100644 --- a/ringhold/src/main/java/org/opencadc/ringhold/Main.java +++ b/ringhold/src/main/java/org/opencadc/ringhold/Main.java @@ -69,17 +69,21 @@ import ca.nrc.cadc.db.ConnectionConfig; import ca.nrc.cadc.db.DBUtil; +import ca.nrc.cadc.util.BucketSelector; import ca.nrc.cadc.util.Log4jInit; import ca.nrc.cadc.util.MultiValuedProperties; import ca.nrc.cadc.util.PropertiesReader; import ca.nrc.cadc.util.StringUtil; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import javax.naming.NamingException; import org.apache.log4j.Level; import org.apache.log4j.Logger; +import org.opencadc.inventory.Namespace; import org.opencadc.inventory.db.SQLGenerator; /** @@ -99,6 +103,8 @@ public class Main { private static final String DB_USERNAME_CONFIG_KEY = CONFIG_PREFIX + ".inventory.username"; private static final String DB_PASSWORD_CONFIG_KEY = CONFIG_PREFIX + ".inventory.password"; private static final String DB_URL_CONFIG_KEY = CONFIG_PREFIX + ".inventory.url"; + private static final String NAMESPACE_CONFIG_KEY = CONFIG_PREFIX + ".namespace"; + private static final String BUCKETS_CONFIG_KEY = CONFIG_PREFIX + ".buckets"; // Used to verify configuration items. See the README for descriptions. private static final String[] MANDATORY_PROPERTY_KEYS = { @@ -107,7 +113,8 @@ public class Main { DB_URL_CONFIG_KEY, DB_USERNAME_CONFIG_KEY, LOGGING_CONFIG_KEY, - SQLGENERATOR_CONFIG_KEY + SQLGENERATOR_CONFIG_KEY, + NAMESPACE_CONFIG_KEY }; public static void main(final String[] args) { @@ -151,17 +158,33 @@ public static void main(final String[] args) { } final Map daoConfig = new TreeMap<>(); - daoConfig.put("schema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); + daoConfig.put("invSchema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); + daoConfig.put("genSchema", props.getFirstPropertyValue(DB_SCHEMA_CONFIG_KEY)); daoConfig.put("jndiDataSourceName", "jdbc/inventory-txn"); final String configuredSQLGenerator = props.getFirstPropertyValue(SQLGENERATOR_CONFIG_KEY); daoConfig.put(SQLGENERATOR_CONFIG_KEY, Class.forName(configuredSQLGenerator)); final Map iterConfig = new TreeMap<>(); - iterConfig.put("schema", daoConfig.get("schema")); + iterConfig.put("invSchema", daoConfig.get("schema")); + iterConfig.put("genSchema", daoConfig.get("schema")); iterConfig.put("jndiDataSourceName", "jdbc/inventory-iter"); iterConfig.put(SQLGENERATOR_CONFIG_KEY, Class.forName(configuredSQLGenerator)); - final InventoryValidator doit = new InventoryValidator(daoConfig, iterConfig); + // check namespaces are valid + final List configuredNamespaces = props.getProperty(NAMESPACE_CONFIG_KEY); + final List namespaces = new ArrayList<>(); + for (String namespace : configuredNamespaces) { + namespaces.add(new Namespace(namespace)); + } + + // uri buckets + BucketSelector bucketSelector = null; + final String buckets = props.getFirstPropertyValue(BUCKETS_CONFIG_KEY); + if (buckets != null) { + bucketSelector = new BucketSelector(buckets); + } + + final InventoryValidator doit = new InventoryValidator(daoConfig, iterConfig, namespaces, bucketSelector); doit.run(); } catch (Throwable unexpected) { log.fatal("Unexpected failure", unexpected); diff --git 
a/tantar/VERSION b/tantar/VERSION index ba99f57c6..d849b8f34 100644 --- a/tantar/VERSION +++ b/tantar/VERSION @@ -1,6 +1,6 @@ ## deployable containers have a semantic and build tag # semantic version tag: major.minor[.patch] # build version tag: timestamp -VER=0.4.5 +VER=1.0.0 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")" unset VER diff --git a/tantar/build.gradle b/tantar/build.gradle index c59d7786e..154e5309d 100644 --- a/tantar/build.gradle +++ b/tantar/build.gradle @@ -21,8 +21,8 @@ mainClassName = 'org.opencadc.tantar.Main' dependencies { compile 'org.opencadc:cadc-util:[1.10.2,2.0)' compile 'org.opencadc:cadc-log:[1.1.2,2.0)' - compile 'org.opencadc:cadc-inventory:[0.9.4,2.0)' - compile 'org.opencadc:cadc-inventory-db:[0.14.5,1.0)' + compile 'org.opencadc:cadc-inventory:[1.0.0,2.0)' + compile 'org.opencadc:cadc-inventory-db:[1.0.0,2.0)' compile 'org.opencadc:cadc-inventory-util:[0.1.8,1.0)' compile 'org.opencadc:cadc-storage-adapter:[0.11.1,1.0)' diff --git a/tantar/src/intTest/java/org/opencadc/tantar/TantarTest.java b/tantar/src/intTest/java/org/opencadc/tantar/TantarTest.java index 41c290ed1..d4c362da9 100644 --- a/tantar/src/intTest/java/org/opencadc/tantar/TantarTest.java +++ b/tantar/src/intTest/java/org/opencadc/tantar/TantarTest.java @@ -142,7 +142,8 @@ protected TantarTest(ResolutionPolicy policy, boolean includeRecoverable) throws Map daoConfig = new TreeMap<>(); daoConfig.put(SQLGenerator.class.getName(), SQLGenerator.class); - daoConfig.put("schema", "inventory"); + daoConfig.put("invSchema", "inventory"); + daoConfig.put("genSchema", "inventory"); this.validator = new BucketValidator(daoConfig, cc, preservingAdapter, policy, "0-f", false); diff --git a/tantar/src/main/java/org/opencadc/tantar/BucketValidator.java b/tantar/src/main/java/org/opencadc/tantar/BucketValidator.java index 291c623e1..91498c464 100644 --- a/tantar/src/main/java/org/opencadc/tantar/BucketValidator.java +++ b/tantar/src/main/java/org/opencadc/tantar/BucketValidator.java @@ -4,7 +4,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2022. (c) 2022. + * (c) 2024. (c) 2024. 
* Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -95,7 +95,7 @@ import org.opencadc.inventory.db.DeletedStorageLocationEventDAO; import org.opencadc.inventory.db.ObsoleteStorageLocationDAO; import org.opencadc.inventory.db.StorageLocationEventDAO; -import org.opencadc.inventory.db.version.InitDatabase; +import org.opencadc.inventory.db.version.InitDatabaseSI; import org.opencadc.inventory.storage.StorageAdapter; import org.opencadc.inventory.storage.StorageEngageException; import org.opencadc.inventory.storage.StorageMetadata; @@ -193,9 +193,9 @@ public BucketValidator(Map daoConfig, ConnectionConfig connectio try { String database = (String) daoConfig.get("database"); - String schema = (String) daoConfig.get("schema"); + String schema = (String) daoConfig.get("invSchema"); DataSource ds = ca.nrc.cadc.db.DBUtil.findJNDIDataSource("jdbc/inventory"); - InitDatabase init = new InitDatabase(ds, database, schema); + InitDatabaseSI init = new InitDatabaseSI(ds, database, schema); init.doInit(); log.info("initDatabase: " + schema + " OK"); } catch (Exception ex) { diff --git a/tantar/src/main/java/org/opencadc/tantar/Main.java b/tantar/src/main/java/org/opencadc/tantar/Main.java index 228d3d438..a326e2832 100644 --- a/tantar/src/main/java/org/opencadc/tantar/Main.java +++ b/tantar/src/main/java/org/opencadc/tantar/Main.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * - * (c) 2020. (c) 2020. + * (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -193,7 +193,8 @@ public static void main(final String[] args) { final String jdbcDriverClassname = "org.postgresql.Driver"; final String schemaName = props.getFirstPropertyValue(DB_SCHEMA_KEY); if (StringUtil.hasLength(schemaName)) { - daoConfig.put("schema", schemaName); + daoConfig.put("invSchema", schemaName); + daoConfig.put("genSchema", schemaName); } else { throw new InvalidConfigException("required config property missing: " + DB_SCHEMA_KEY); } diff --git a/vault-quota/Design.md b/vault-quota/Design.md new file mode 100644 index 000000000..769f85039 --- /dev/null +++ b/vault-quota/Design.md @@ -0,0 +1,179 @@ +# vault quota design/algorithms + +The definitive source of content-length (file size) of a DataNode comes from the +`inventory.Artifact` table and is not known until a PUT to storage is completed. +In the case of a `vault` service co-located with a single storage site (`minoc`), +the new Artifact is visible in the database as soon as the PUT to `minoc` is +completed. In the case of a `vault` service co-located with a global SI, the new +Artifact is visible in the database once it is synced from the site of the PUT to +the global database by `fenwick` (or worst case: `ratik`). + +## NOTE +This design was for supporting propagation of space used up the tree so that +allocation space used was dynamically updated as content was modified. While the +algorithm for doing that is nominally viable, the algorithm to validate and repair +incorrect container node sizes in a live system is excessively complex and probably +impossible to implement in practice (deadlocks, excessive database load and query +processing, etc). 
+
+**This design will not be completed and implemented** and is retained here for future
+reference.
+
+## TODO
+The design below only takes into account incremental propagation of space used
+by stored files. It is not complete/verified until we also come up with a validation
+algorithm that can detect and fix discrepancies in a live `vault`.
+
+## operations that affect node size
+The following operations affect node size:
+* delete node removes the node and applies a negative delta to the parent
+* recursive delete will need to update twice as many nodes in small transactions
+* move node applies a negative delta to the previous parent and a positive delta to the new parent
+* copy applies a positive delta to the new parent
+* transfer negotiation needs to check allocationNode quota vs size to allow a put to proceed
+
+This has to be done entirely inside the NodePersistence implementation; that should be feasible
+since the argument of NodePersistence methods is the previously retrieved node with the full parent
+tree intact. It's not clear which of these will require changes in cadc-vos-server, but if they do
+it will need to be possible for them to be optional and/or gracefully not do anything.
+
+In any case, any solution with container node delta(s) is inherently multi-threaded because
+user requests can modify them.
+
+## DataNode size algorithm:
+This is an event watcher that gets Artifact events (after a PUT) and initiates the
+propagation of sizes (space used).
+```
+track progress using HarvestState (source: `db:{bucket range}`, name: TBD)
+incremental query for new artifacts in lastModified order
+for each new Artifact:
+  query for DataNode (storageID = artifact.uri)
+  if Artifact.contentLength != Node.size:
+    start txn
+    lock parent
+    lock datanode
+    compute delta
+    apply delta to parent.delta
+    set dataNode.size
+    update HarvestState
+    commit txn
+```
+Optimization: The above sequence does the first step of propagation from DataNode to
+parent ContainerNode so the maximum work can be done in parallel using bucket ranges
+(smaller than 0-f). It also means the propagation below only has to consider
+ContainerNode.delta since DataNode(s) never have a delta.
+
+## ContainerNode size propagation algorithm:
+```
+query for ContainerNode with non-zero delta
+for each ContainerNode:
+  start txn
+  lock parent
+  lock containernode
+  re-check delta
+  apply delta to parent.delta
+  apply delta to containernode.size, set containernode.delta=0
+  commit txn
+```
+The above sequence finds candidate propagations, locks (order: parent-child),
+and applies the propagation. This moves the outstanding delta up the tree one level. If the
+sequence acts on multiple child containers before the parent, the delta(s) naturally
+_merge_ and fewer delta propagations occur in the upper part of the tree.
+
+The most generic implementation is to iterate over container nodes:
+```
+Iterator iter = nodeDAO.containerIterator(boolean nonZeroDelta);
+```
+It would be optimal to do propagations from the bottom upwards in order to "merge" them,
+but it doesn't seem practical to forcibly accomplish that ordering. Container size propagation
+will be implemented as a single sequence (thread). We could add something to the vospace.Node
+table to support subdividing work and enable multiple threads, but there is nothing there right
+now and it might not be necessary.
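To make the locking and delta hand-off concrete, here is a minimal Java sketch of the single-threaded propagation pass described above. `ContainerNodeRow`, `ContainerStore`, `ContainerSizePropagator` and all of their methods are invented placeholders: the proposed containerIterator, delta column, and per-row locking do not exist in cadc-inventory-db yet (see the API lists below), so this only illustrates the intended lock order (parent, then child) and the one-level-up delta transfer.

```java
import java.util.Iterator;
import java.util.UUID;

// Hypothetical placeholder for a container node row with the proposed size/delta columns.
class ContainerNodeRow {
    UUID id;
    UUID parentID;   // null for the root container
    long size;
    long delta;
}

// Hypothetical placeholder for the proposed DAO + transaction support.
interface ContainerStore {
    Iterator<ContainerNodeRow> containerIterator(boolean nonZeroDelta);
    ContainerNodeRow lock(UUID id);          // e.g. SELECT ... FOR UPDATE
    void update(ContainerNodeRow node);
    void startTransaction();
    void commitTransaction();
    void rollbackTransaction();
}

class ContainerSizePropagator {
    private final ContainerStore store;

    ContainerSizePropagator(ContainerStore store) {
        this.store = store;
    }

    // One pass: move each outstanding delta up the tree by one level.
    void propagateOnce() {
        Iterator<ContainerNodeRow> candidates = store.containerIterator(true);
        while (candidates.hasNext()) {
            ContainerNodeRow candidate = candidates.next();
            store.startTransaction();
            try {
                // lock order from the design above: parent first, then the child container
                ContainerNodeRow parent = (candidate.parentID == null) ? null : store.lock(candidate.parentID);
                ContainerNodeRow cur = store.lock(candidate.id);
                if (cur.delta != 0) {                // re-check after locking
                    if (parent != null) {
                        parent.delta += cur.delta;   // hand the delta up one level
                        store.update(parent);
                    }
                    cur.size += cur.delta;           // apply the delta to this container
                    cur.delta = 0;
                    store.update(cur);
                }
                store.commitTransaction();
            } catch (RuntimeException ex) {
                store.rollbackTransaction();
                throw ex;
            }
        }
    }
}
```
Repeated passes move a delta one level at a time, so deltas from sibling containers tend to merge in their common parent before being pushed higher, which is the merging behaviour noted above.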
+ +## validation + +### DataNode vs Artifact discrepancies +These can be validated in parallel by multiple threads, subdivide work by bucket if we add +DataNode.storageBucket (== Artifact.uriBucket). + +``` +discrepancy: Artifact exists but DataNode does not +explanation: DataNode created, transfer negotiated, DataNode removed, transfer executed +evidence: DeletedNodeEvent exists +action: remove artifact, create DeletedArtifactEvent + +discrepancy: Artifact exists but DataNode does not +explanation: DataNode created, Artifact put, DataNode deleted, Artifact delete failed +evidence: only possible with singlePool==false +action: remove artifact, create DeletedArtifactEvent + +discrepancy: DataNode exists but Artifact does not +explanation: DataNode created, Artifact never (successfully) put (normal) +evidence: DataNode.nodeSize == 0 or null +action: none + +discrepancy: DataNode exists but Artifact does not +explanation: deleted or lost Artifact +evidence: DataNode.nodeSize != 0 (deleted vs lost: DeletedArtifactEvent exists) +action: lock nodes, fix dataNode and propagate delta to parent + +discrepancy: DataNode.nodeSize != Artifact.contentLength +explanation: artifact written (if DataNode.size > 0: replaced) +action: lock nodes, fix DataNode and propagate delta to parent +``` +Required lock order: child-parent or parent-child OK. + +The most generic implementation is a merge join of two iterators (see ratik, tantar): +``` +Iterator aiter = artifactDAO.iterator(vaultNamespace, bucket); // uriBucket,uri order +Iterator niter = nodeDAO.iterator(bucket); // storageBucket,storageID order +``` + +### ContainerNode vs child nodes discrepancies +These can be validated in +``` +discrepancy 1: container size != sum(child size) +explanation: un-propagated delta from put or delete +evidence: sum(child delta) != 0 +action: none + +discrepancy 1: container size != sum(child size) +explanation: bug +evidence: sum(child delta) == 0 +action: fix?? container size, set container.delta +``` +Required lock order: locks the parent of a parent-children relationship so propagations are blocked, +then do the aggregate query (select sum(child size), sum(child delta) where parentID=?) +but child state is still not stable (delete child node, move child out, copy/move node in, +sync child nodes from remote) so all of these would have to lock in the same order to avoid deadlock. +I don't see any way to avoid deadlocks when user requests can lock multiple nodes. + +Recursive delete, container size propagation, datanode validation, and container validation can will +all potentially modify child delta(s). + +The most generic implementation is to iterate over container nodes: +``` +Iterator iter = nodeDAO.containerIterator(false); // order not relevant +``` + +## database changes required +note: all field and column names TBD +* add `transient Long bytesUsed` to ContainerNode and DataNode +* add `transient long delta` field to ContainerNode +* add `bytesUsed` to the `vospace.Node` table +* add `delta` to the `vospace.Node` table +* add `storageBucket` to DataNode?? 
TBD +* add `storageBucket` to `vospace.Node` table + +## cadc-inventory-db API required immediately +* incremental sync query/iterator: ArtifactDAO.iterator(Namespace ns, String uriBucketPrefix, Date minLastModified, boolean ordered) + order by lastModified if set +* lookup DataNode by storageID: NodeDAO.getDataNode(URI storageID) +* indices to support new queries + +## cadc-inventory-db API required later +* validate-by-bucket: use ArtifactDAO.iterator(String uriBucketPrefix, boolean ordered, Namespace ns) +* validate-by-bucket: NodeDAO.dataNodeIterator(String storageBucketPrefix, boolean ordered) +* incremental and validate containers: NodeDAO.containerIterator(boolean nonZeroDelta) +* indices to support new queries + diff --git a/vault-quota/NodeSize.md b/vault-quota/NodeSize.md new file mode 100644 index 000000000..fd48db421 --- /dev/null +++ b/vault-quota/NodeSize.md @@ -0,0 +1,85 @@ +# vault quota design/algorithms + +The definitive source of content-length (file size) of a DataNode comes from the +`inventory.Artifact` table and is not known until a PUT to storage is completed. +In the case of a `vault` service co-located with a single storage site (`minoc`), +the new Artifact is visible in the database as soon as the PUT to `minoc` is +completed. In the case of a `vault` service co-located with a global SI, the new +Artifact is visible in the database once it is synced from the site of the PUT to +the global database by `fenwick` (or worst case: `ratik`). + +## incremental DataNode size algorithm +DataNode(s) require the `bytesUsed` be set so that sizes can be output from listing +container nodes without a join or query to the artifact table. + +This is an event watcher that gets Artifact events (after a PUT) and intiates the +propagation of sizes (space used). +``` +track progress using HarvestState (source, name: TBD) +incremental query for new artifacts in lastModified order +for each new Artifact: + query for DataNode (storageID = artifact.uri) + if Artifact.contentLength != Node.size: + start txn + lock datanode + recheck size diff + set dataNode.size + update HarvestState + commit txn +``` + +## validate DataNode vs Artifact discrepancies +These can be validated in parallel by multiple threads, subdivide work by bucket if we add +DataNode.storageBucket (== Artifact.uriBucket). + +``` +discrepancy: Artifact exists but DataNode does not +explanation: DataNode created, transfer negotiated, DataNode removed, transfer executed +evidence: DeletedNodeEvent exists +action: remove artifact, create DeletedArtifactEvent + +discrepancy: Artifact exists but DataNode does not +explanation: DataNode created, Artifact put, DataNode deleted, Artifact delete failed +evidence: only possible with singlePool==false +action: remove artifact, create DeletedArtifactEvent + +discrepancy: DataNode exists but Artifact does not +explanation: DataNode created, Artifact never (successfully) put (normal) +evidence: DataNode.nodeSize == 0 or null +action: none + +discrepancy: DataNode exists but Artifact does not +explanation: deleted or lost Artifact +evidence: DataNode.nodeSize != 0 (deleted vs lost: DeletedArtifactEvent exists) +action: lock nodes, fix dataNode and propagate delta to parent + +discrepancy: DataNode.nodeSize != Artifact.contentLength +explanation: artifact written (if DataNode.size > 0: replaced) +action: lock nodes, fix DataNode and propagate delta to parent +``` +Required lock order: child-parent or parent-child OK. 
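For reference, a minimal Java sketch of the incremental DataNode size watcher pseudocode above. `ArtifactEvent`, `DataNodeRow`, `QuotaStore`, and `DataNodeSizeWatcher` are invented placeholders standing in for the proposed (not yet existing) DAO, HarvestState, and schema changes listed later in this document; the sketch only illustrates the lock/re-check/update flow per artifact.

```java
import java.net.URI;
import java.util.Date;
import java.util.Iterator;

// Hypothetical placeholder: a new/updated Artifact as seen by the incremental query.
class ArtifactEvent {
    URI uri;              // matches DataNode.storageID
    long contentLength;
    Date lastModified;
}

// Hypothetical placeholder: a DataNode row with the proposed bytesUsed column.
class DataNodeRow {
    URI storageID;
    Long bytesUsed;       // null until a file has been stored
}

// Hypothetical placeholder for the proposed DAO, HarvestState, and transaction calls.
interface QuotaStore {
    Iterator<ArtifactEvent> artifactsSince(Date checkpoint);   // lastModified order
    DataNodeRow lockDataNode(URI storageID);                   // may return null
    void update(DataNodeRow node);
    void saveCheckpoint(Date lastModified);                    // HarvestState-style progress
    void startTransaction();
    void commitTransaction();
    void rollbackTransaction();
}

class DataNodeSizeWatcher {
    private final QuotaStore store;

    DataNodeSizeWatcher(QuotaStore store) {
        this.store = store;
    }

    // One incremental pass, mirroring the pseudocode above.
    void runOnce(Date checkpoint) {
        Iterator<ArtifactEvent> events = store.artifactsSince(checkpoint);
        while (events.hasNext()) {
            ArtifactEvent artifact = events.next();
            store.startTransaction();
            try {
                DataNodeRow node = store.lockDataNode(artifact.uri);
                // re-check after locking; an artifact with no DataNode is left to validation
                if (node != null && (node.bytesUsed == null || node.bytesUsed != artifact.contentLength)) {
                    node.bytesUsed = artifact.contentLength;
                    store.update(node);
                }
                store.saveCheckpoint(artifact.lastModified);
                store.commitTransaction();
            } catch (RuntimeException ex) {
                store.rollbackTransaction();
                throw ex;
            }
        }
    }
}
```
Several such watchers could run in parallel, each filtering on a subset of Artifact.uriBucket values, which is how the vault-quota process below divides the work.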
+ +The most generic implementation is a merge join of two iterators (see ratik, tantar): +``` +Iterator aiter = artifactDAO.iterator(vaultNamespace, bucket); // uriBucket,uri order +Iterator niter = nodeDAO.iterator(bucket); // storageBucket,storageID order +``` + +## database changes required +note: all field and column names TBD +* add `transient Long bytesUsed` to ContainerNode and DataNode +* add `bytesUsed` to the `vospace.Node` table +* add `storageBucket` to DataNode?? TBD +* add `storageBucket` to `vospace.Node` table + +## cadc-inventory-db API required immediately +* incremental sync query/iterator: ArtifactDAO.iterator(Namespace ns, String uriBucketPrefix, Date minLastModified, boolean ordered) + order by lastModified if set +* lookup DataNode by storageID: NodeDAO.getDataNode(URI storageID) +* indices to support new queries + +## cadc-inventory-db API required later (tentative) +* validate-by-bucket: use ArtifactDAO.iterator(String uriBucketPrefix, boolean ordered, Namespace ns) +* validate-by-bucket: NodeDAO.dataNodeIterator(String storageBucketPrefix, boolean ordered) +* indices to support new queries + diff --git a/vault-quota/README.md b/vault-quota/README.md new file mode 100644 index 000000000..c0f2db0eb --- /dev/null +++ b/vault-quota/README.md @@ -0,0 +1,59 @@ +# Storage Inventory VOSpace quota support process (vault-quota) + +Process to maintain container node sizes so that quota limits can be enforced by the +main `vault` service. This process runs in incremental mode (single process running +continuously) to update a local vospace database. + +`vault-quota` is an optional process that is only needed if `vault` is configured to +enforce quotas, although it could be used to maintain container node sizes without +quota enforcement. + +## configuration +See the [cadc-java](https://github.com/opencadc/docker-base/tree/master/cadc-java) image +docs for general config requirements. + +Runtime configuration must be made available via the `/config` directory. + +### vault-quota.properties +``` +org.opencadc.vault.quota.logging = {info|debug} + +# inventory database settings +org.opencadc.inventory.db.SQLGenerator=org.opencadc.inventory.db.SQLGenerator +org.opencadc.vault.quota.nodes.schema={schema for inventory database objects} +org.opencadc.vault.quota.nodes.username={username for inventory admin} +org.opencadc.vault.quota.nodes.password={password for inventory admin} +org.opencadc.vault.quota.nodes.url=jdbc:postgresql://{server}/{database} + +org.opencadc.vault.quota.threads={number of threads to watch for artifact events} + +# storage namespace +org.opencadc.vault.storage.namespace = {a storage inventory namespace to use} +``` +The _nodes_ account owns and manages (create, alter, drop) vospace database objects and updates +content in the vospace schema. The database is specified in the JDBC URL. Failure to connect or +initialize the database will show up in logs. + +The _threads_ key configures the number of threads that watch for new Artifact events and initiate +the propagation of sizes to parent containers. These threads each monitor a subset of artifacts using +`Artifact.uriBucket` filtering; for simplicity, the following values are allowed: 1, 2, 4, 8, 16. + +In addition to the above threads, there is one additional thread that propagates size changes up +the tree of container nodes to the container node(s) where quotas are specified. + +## building it +``` +gradle clean build +docker build -t vault-quota -f Dockerfile . 
+```
+
+## checking it
+```
+docker run -it vault-quota:latest /bin/bash
+```
+
+## running it
+```
+docker run --user opencadc:opencadc -v /path/to/external/config:/config:ro --name vault-quota vault-quota:latest
+```
+
diff --git a/vault/Dockerfile b/vault/Dockerfile
index f3bc94674..4f35f9ff5 100644
--- a/vault/Dockerfile
+++ b/vault/Dockerfile
@@ -1,3 +1,4 @@
-FROM cadc-tomcat:1
+FROM images.opencadc.org/library/cadc-tomcat:1
+
+COPY build/libs/vault.war /usr/share/tomcat/webapps/vault.war
-COPY build/libs/vault.war /usr/share/tomcat/webapps/
diff --git a/vault/README.md b/vault/README.md
index 3dd77529a..f2ef10863 100644
--- a/vault/README.md
+++ b/vault/README.md
@@ -1,51 +1,146 @@
-# Storage Inventory storage management service (vault)
+# Storage Inventory VOSpace-2.1 service (vault)
-## configuration
-See the [cadc-tomcat](https://github.com/opencadc/docker-base/tree/master/cadc-tomcat) image docs
-for expected deployment and general config requirements. The `vault` war file can be renamed
-at deployment time in order to support an alternate service name, including introducing
-additional path elements (see war-rename.conf).
+The `vault` service is an implementation of the IVOA VOSpace
+specification designed to co-exist with other storage-inventory components. It provides a hierarchical data
+organization layer on top of the storage management of storage-inventory.
+
+The simplest configuration would be to deploy `vault` alongside `minoc`, with a single metadata database and a
+single back end storage system. Details: TBD.
+
+The other option would be to deploy `vault` with `raven` and `luskan` in a global inventory database and make
+use of one or more storage sites from the network of known sites to store files. Details: TBD.
+
+## deployment
-Runtime configuration must be made available via the `/config` directory.
+The `vault` war file can be renamed at deployment time in order to support an alternate service name,
+including introducing additional path elements. See
+cadc-tomcat (war-rename.conf).
+
+## configuration
+The following runtime configuration must be made available via the `/config` directory.
 ### catalina.properties
-When running vault.war in tomcat, parameters of the connection pool in META-INF/context.xml need
-to be configured in catalina.properties:
+This file contains java system properties to configure the tomcat server and some of the java libraries used in the service.
+
+See cadc-tomcat
+for system properties related to the deployment environment.
+
+See cadc-util
+for common system properties.
+
+`vault` includes multiple IdentityManager implementations to support authenticated access:
+- See cadc-access-control-identity for CADC access-control system support.
+- See cadc-gms for OIDC token support.
+
+`vault` requires a connection pool to the local database:
 ```
 # database connection pools
 org.opencadc.vault.nodes.maxActive={max connections for vospace pool}
 org.opencadc.vault.nodes.username={username for vospace pool}
 org.opencadc.vault.nodes.password={password for vospace pool}
 org.opencadc.vault.nodes.url=jdbc:postgresql://{server}/{database}
+
+org.opencadc.vault.inventory.maxActive={max connections for inventory pool}
+org.opencadc.vault.inventory.username={username for inventory pool}
+org.opencadc.vault.inventory.password={password for inventory pool}
+org.opencadc.vault.inventory.url=jdbc:postgresql://{server}/{database}
+
+org.opencadc.vault.uws.maxActive={max connections for uws pool}
+org.opencadc.vault.uws.username={username for uws pool}
+org.opencadc.vault.uws.password={password for uws pool}
+org.opencadc.vault.uws.url=jdbc:postgresql://{server}/{database}
 ```
-The `nodes` account owns and manages (create, alter, drop) vault database objects and manages
+The _nodes_ account owns and manages (create, alter, drop) vospace database objects and manages
 all the content (insert, update, delete). The database is specified in the JDBC URL and the schema name is specified
 in the vault.properties (below). Failure to connect or initialize the database will show up in logs and in the
 VOSI-availability output.
+The _inventory_ account owns and manages (create, alter, drop) inventory database objects and manages
+all the content (update and delete Artifact, insert DeletedArtifactEvent). The database is specified
+in the JDBC URL and the schema name is specified in the vault.properties (below). Failure to connect or
+initialize the database will show up in logs and in the VOSI-availability output. The _inventory_ content
+may be in the same database as the _nodes_, in a different database in the same server, or in a different
+server entirely. See `org.opencadc.vault.singlePool` below for the pros and cons. The _inventory_ pool must
+be functional for initialization and availability checks (`maxActive` = 1 with `singlePool` is sufficient);
+the connection information is also re-used by an internal background thread that synchronizes data node sizes.
+
+The _uws_ account owns and manages (create, alter, drop) uws database objects in the `uws` schema and manages all
+the content (insert, update, delete). The database is specified in the JDBC URL. Failure to connect or initialize the
+database will show up in logs and in the VOSI-availability output.
+
+### cadc-registry.properties
+
+See cadc-registry.
+
 ### vault.properties
 A vault.properties file in /config is required to run this service. The following keys are required:
 ```
 # service identity
-org.opencadc.vault.resourceID=ivo://{authority}/{name}
+org.opencadc.vault.resourceID = ivo://{authority}/{name}
+
+# consistency settings
+org.opencadc.vault.consistency.preventNotFound=true|false
+
+# (optional) identify which container nodes are allocations
+org.opencadc.vault.allocationParent = {top level node}
 
 # vault database settings
-org.opencadc.vault.nodes.schema={schema name}
+org.opencadc.vault.inventory.schema = {inventory schema name}
+org.opencadc.vault.vospace.schema = {vospace schema name}
+org.opencadc.vault.singlePool = {true|false}
+
+# root container nodes
+org.opencadc.vault.root.owner = {owner of root node}
+
+# storage namespace
+org.opencadc.vault.storage.namespace = {a storage inventory namespace to use}
 ```
 The vault _resourceID_ is the resourceID of _this_ vault service.
-The nodes _schema_ name is the name of the database schema used for all created database objects (tables, indices, etc). +The _preventNotFound_ key can be used to configure `vault` to prevent artifact-not-found errors that might +result due to the eventual consistency nature of the storage system by directly checking for the artifact at +_all known_ sites. It only makes sense to enable this when `vault` is running in a global inventory (along with +`raven` and/or `fenwick` instances syncing artifact metadata. This feature introduces an overhead for the +genuine not-found cases: transfer negotiation to GET the file that was never PUT. -### vault-availability.properties (optional) -``` -The vault-availability.properties file specifies which users have the authority to change the availability state of the vault service. Each entry consists of a key=value pair. The key is always "users". The value is the x500 canonical user name. -``` +The _allocationParent_ is a path to a container node (directory) which contains space allocations. An allocation +is owned by a user (usually different from the _rootOwner_ admin user) who is responsible for the allocation +and all conntent therein. The owner of an allocation is granted additional permissions within their +allocation (they can read/write/delete anything) so the owner cannot be blocked from access to any content +within their allocation. This probably only matters for multi-user projects. Multiple _allocationParent_(s) may +be configured to organise the top level of the content (e.g. /home and /projects). Paths configured to be +_allocationParent_(s) will be automatically created (if necessary), owned by the _rootOwner_, and will be +anonymously readable (public). Limitation: only a single level of top-level _allocationParent_(s) are supported. -Example: -``` -users = {user identity} -``` -`users` specifies the user(s) who are authorized to make calls to the service. The value is a list of user identities (X500 distingushed name), one line per user. Optional: if the `vault-availability.properties` is not found or does not list any `users`, the service will function in the default mode (ReadWrite) and the state will not be changeable. +The _inventory.schema_ name is the name of the database schema used for all inventory database objects. This +currently must be "inventory" due to configuration limitations in luskan. + +The _vospace.schema_ name is the name of the database schema used for all vospace database objects. Note that +with a single connection pool, the two schemas must be in the same database. + +The _singlePool_ key configures `vault` to use a single pool (the _nodes_ pool) for both vospace and inventory +operations. The inventory and vospace content must be in the same database for this to work. When configured +to use a single pool, delete node operations can delete a DataNode and the associated Artifact and create the +DeletedArtifactEvent in a single transaction. When configured to use separate pools, the delete Artifact and create +DeletedArtifactEvent are done in a separate transaction and if that fails the Artifact will be left behind and +orphaned until the vault validation (see ???) runs and fixes such a discrepancy. However, _singlePool_ = `false` +allows the content to be stored in two separate databases or servers. 
+
+The _root.owner_ key configures the owner of the root node; the owner has full read and write permission
+in the root container, so it can create and delete container nodes at the root and assign container node properties
+that are normally read-only for normal users: owner, quota, etc. This must be set to the username of the admin.
+
+The _storage.namespace_ key configures `vault` to use the specified namespace in storage-inventory to store files.
+This only applies to new data nodes that are created and will not affect previously created nodes and artifacts.
+Probably don't want to change this... prevent change? TBD.
+
+### cadc-log.properties (optional)
+See cadc-log for common
+dynamic logging control.
+
+### cadc-vosi.properties (optional)
+See cadc-vosi for common
+service state control.
 
 ## building it
 ```
@@ -63,12 +158,3 @@
 docker run --rm -it vault:latest /bin/bash
 docker run --rm --user tomcat:tomcat --volume=/path/to/external/config:/config:ro --name vault vault:latest
 ```
-## apply semantic version tags
-```bash
-. VERSION && echo "tags: $TAGS"
-for t in $TAGS; do
-  docker image tag vault:latest vault:$t
-done
-unset TAGS
-docker image list vault
-```
diff --git a/vault/TODO b/vault/TODO
new file mode 100644
index 000000000..1481088d1
--- /dev/null
+++ b/vault/TODO
@@ -0,0 +1,22 @@
+
+* NodePersistenceImpl: review for necessary transactions and locks
+
+* NodePersistenceImpl: reconcile with NodePersistence API and assign responsibilities
+- property update checking
+- permission checking
+- link node resolution
+
+* files endpoint:
+- if coexist with minoc: generate pre-auth URL to it and redirect
+- if coexist with raven: need raven ProtocolsGenerator
+
+* transfer negotiation:
+- review cadc-vos-server and cavern implementations
+- probably a complete TransferRunner; maybe separate sync and async runners
+- if co-exist with minoc: generate pre-auth URL to it
+- if co-exist with raven: need raven ProtocolsGenerator
+- figure out if/how vault can have its own uws tables in db or share with inventory (luskan)
+
+* pre-auth URL keys -- what to support? recommend?
+- vault has its own key pair && minoc(s) have multiple pub keys?
+- vault and raven share private key?
diff --git a/vault/VERSION b/vault/VERSION
index 71963f952..3d4fab565 100644
--- a/vault/VERSION
+++ b/vault/VERSION
@@ -4,6 +4,6 @@
 # tags with and without build number so operators use the versioned
 # tag but we always keep a timestamped tag in case a semantic tag gets
 # replaced accidentally
-VER=0.1.0
+VER=1.0.0
 TAGS="${VER} ${VER}-$(date --utc +"%Y%m%dT%H%M%S")"
 unset VER
diff --git a/vault/build.gradle b/vault/build.gradle
index dc09ec290..e6ac8689f 100644
--- a/vault/build.gradle
+++ b/vault/build.gradle
@@ -25,15 +25,39 @@ war {
     }
 }
+description = 'OpenCADC vault service'
+def git_url = 'https://github.com/opencadc/storage-inventory'
+
 dependencies {
+    compile 'javax.servlet:javax.servlet-api:[3.1,4.0)'
+
+    compile 'org.opencadc:cadc-util:[1.11.0,2.0)'
     compile 'org.opencadc:cadc-log:[1.1.6,2.0)'
-    compile 'org.opencadc:cadc-vosi:[1.4.3,2.0)'
+    compile 'org.opencadc:cadc-gms:[1.0.5,)'
+    compile 'org.opencadc:cadc-rest:[1.3.16,)'
+    compile 'org.opencadc:cadc-vos:[2.0.6,)'
+    compile 'org.opencadc:cadc-vos-server:[2.0.9,)'
+    compile 'org.opencadc:cadc-vosi:[1.3.2,)'
+    compile 'org.opencadc:cadc-uws:[1.0,)'
+    compile 'org.opencadc:cadc-uws-server:[1.2.19,)'
+    compile 'org.opencadc:cadc-access-control:[1.1.1,2.0)'
+    compile 'org.opencadc:cadc-cdp:[1.2.3,)'
+    compile 'org.opencadc:cadc-registry:[1.7.6,)'
+    compile 'org.opencadc:cadc-inventory:[1.0.0,2.0)'
+    compile 'org.opencadc:cadc-inventory-db:[1.0.0,2.0)'
+    compile 'org.opencadc:cadc-inventory-server:[0.3,1.0)'
+    compile 'org.opencadc:cadc-permissions:[0.3.5,1.0)'
     testCompile 'junit:junit:[4.0,)'
+
+    runtime 'org.opencadc:cadc-access-control-identity:[1.2.1,)'
+    runtime 'org.opencadc:cadc-gms:[1.0.5,)'
+
+    intTestCompile 'org.opencadc:cadc-test-vosi:[1.0.11,)'
+    intTestCompile 'org.opencadc:cadc-test-vos:[2.1.6,)'
 }
 configurations {
+    compile.exclude group: 'org.restlet.jee'
     runtime.exclude group: 'org.postgresql:postgresql'
 }
diff --git a/vault/src/intTest/README.md b/vault/src/intTest/README.md
new file mode 100644
index 000000000..b03af25ac
--- /dev/null
+++ b/vault/src/intTest/README.md
@@ -0,0 +1,30 @@
+# Storage Inventory VOSpace-2.1 service (vault)
+
+The simplest configuration that deploys `vault` alongside `minoc`, with a single metadata database and single
+back end storage system, is sufficient to run the `vault` integration tests. The tests also rely on the
+presence of the root owner X509 certificate in `build/classes/java/intTest/vault-test.pem`.
+Some tests (primarily permission tests) will be skipped unless the certificate of a second user is present
+in `build/classes/java/intTest/vault-auth-test.pem`. This user has to be a member of the `ivo://cadc.nrc.ca/gms?opencadc-vospace-test`
+group. The names of these certificates and groups are hardcoded in the `vault` int test classes.
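+
+For example, a typical way to provide the certificates and run the suite (the `intTest` gradle task name is
+assumed from the build configuration above; adjust the certificate paths to your environment):
+```
+cd vault
+# place the test certificates where the tests expect to find them (see above)
+mkdir -p build/classes/java/intTest
+cp /path/to/vault-test.pem /path/to/vault-auth-test.pem build/classes/java/intTest/
+../gradlew -i intTest
+```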
+ +The int tests suite also relies on a specific configuration of the `vault` service: +### vault.properties +``` +# service identity +org.opencadc.vault.resourceID = ivo://opencadc.org/vault + +# (optional) identify which container nodes are allocations +org.opencadc.vault.allocationParent = / + +# consistency settings +org.opencadc.vault.consistency.preventNotFound=true + +# vault database settings +org.opencadc.vault.inventory.schema = inventory +org.opencadc.vault.vospace.schema = vault +org.opencadc.vault.singlePool = true + +# root container nodes +org.opencadc.vault.root.owner = {owner of root node} + +``` diff --git a/vault/src/intTest/java/org/opencadc/vault/Constants.java b/vault/src/intTest/java/org/opencadc/vault/Constants.java new file mode 100644 index 000000000..c07bdad08 --- /dev/null +++ b/vault/src/intTest/java/org/opencadc/vault/Constants.java @@ -0,0 +1,92 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. 
+* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.util.FileUtil; +import java.io.File; +import java.net.URI; +import org.apache.log4j.Logger; +import org.opencadc.gms.GroupURI; + +/** + * + * @author pdowler + */ +public class Constants { + private static final Logger log = Logger.getLogger(Constants.class); + + static URI RESOURCE_ID = URI.create("ivo://opencadc.org/vault"); + + static File ADMIN_CERT = FileUtil.getFileFromResource("vault-test.pem", Constants.class); + static File ALT_CERT = FileUtil.getFileFromResource("vault-auth-test.pem", Constants.class); + + static GroupURI ALT_GROUP = new GroupURI(URI.create("ivo://cadc.nrc.ca/gms?opencadc-vospace-test")); + + private Constants() { + } +} diff --git a/vault/src/intTest/java/org/opencadc/vault/FilesTest.java b/vault/src/intTest/java/org/opencadc/vault/FilesTest.java new file mode 100644 index 000000000..5002c5fe6 --- /dev/null +++ b/vault/src/intTest/java/org/opencadc/vault/FilesTest.java @@ -0,0 +1,95 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024i. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. 
préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.util.FileUtil; +import ca.nrc.cadc.util.Log4jInit; +import java.io.File; +import java.net.URI; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; + +/** + * Test the nodes endpoint. + * + * @author pdowler + */ +public class FilesTest extends org.opencadc.conformance.vos.FilesTest { + private static final Logger log = Logger.getLogger(FilesTest.class); + + static { + Log4jInit.setLevel("org.opencadc.conformance.vos", Level.DEBUG); + Log4jInit.setLevel("org.opencadc.vospace", Level.DEBUG); + } + + public FilesTest() { + super(Constants.RESOURCE_ID, Constants.ADMIN_CERT); + + enableTestDataNodePermission(Constants.ALT_CERT); + } +} diff --git a/vault/src/intTest/java/org/opencadc/vault/NodesTest.java b/vault/src/intTest/java/org/opencadc/vault/NodesTest.java new file mode 100644 index 000000000..3d8ec5cab --- /dev/null +++ b/vault/src/intTest/java/org/opencadc/vault/NodesTest.java @@ -0,0 +1,99 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. 
+* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.util.FileUtil; +import ca.nrc.cadc.util.Log4jInit; +import java.io.File; +import java.net.URI; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.opencadc.gms.GroupURI; + +/** + * Test the nodes endpoint. + * + * @author pdowler + */ +public class NodesTest extends org.opencadc.conformance.vos.NodesTest { + private static final Logger log = Logger.getLogger(NodesTest.class); + + static { + Log4jInit.setLevel("org.opencadc.conformance.vos", Level.DEBUG); + Log4jInit.setLevel("org.opencadc.vospace", Level.DEBUG); + } + + public NodesTest() { + super(Constants.RESOURCE_ID, Constants.ADMIN_CERT); + + enablePermissionTests(Constants.ALT_GROUP, Constants.ALT_CERT); + + // vault does not check the actual groups in the permission props tests, hence they can be made up. 
+ enablePermissionPropsTest(new GroupURI(URI.create("ivo://myauth/gms?gr1")), new GroupURI(URI.create("ivo://myauth/gms?gr2"))); + } +} diff --git a/vault/src/intTest/java/org/opencadc/vault/RecursiveNodeDeleteTest.java b/vault/src/intTest/java/org/opencadc/vault/RecursiveNodeDeleteTest.java new file mode 100644 index 000000000..e66d0ed0a --- /dev/null +++ b/vault/src/intTest/java/org/opencadc/vault/RecursiveNodeDeleteTest.java @@ -0,0 +1,104 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. 
If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.util.FileUtil; +import ca.nrc.cadc.util.Log4jInit; +import java.io.File; +import java.net.URI; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.opencadc.gms.GroupURI; + +/** + * Test the async endpoint. + * + * @author pdowler + */ +public class RecursiveNodeDeleteTest extends org.opencadc.conformance.vos.RecursiveNodeDeleteTest { + private static final Logger log = Logger.getLogger(RecursiveNodeDeleteTest.class); + + static { + Log4jInit.setLevel("org.opencadc.conformance.vos", Level.DEBUG); + Log4jInit.setLevel("org.opencadc.vospace", Level.INFO); + //Log4jInit.setLevel("ca.nrc.cadc.auth", Level.DEBUG); + } + + private static File ADMIN_CERT = FileUtil.getFileFromResource("vault-test.pem", RecursiveNodeDeleteTest.class); + + public RecursiveNodeDeleteTest() { + super(URI.create("ivo://opencadc.org/vault"), ADMIN_CERT); + + File altCert = FileUtil.getFileFromResource("vault-auth-test.pem", RecursiveNodeDeleteTest.class); + enablePermissionTests(new GroupURI(URI.create("ivo://cadc.nrc.ca/gms?opencadc-vospace-test")), altCert); + + // vault does not check the actual groups in the permission props tests, hence they can be made up. + enablePermissionPropsTest(new GroupURI(URI.create("ivo://myauth/gms?gr1")), new GroupURI(URI.create("ivo://myauth/gms?gr2"))); + } + +} diff --git a/vault/src/intTest/java/org/opencadc/vault/RecursiveNodePropsTest.java b/vault/src/intTest/java/org/opencadc/vault/RecursiveNodePropsTest.java new file mode 100644 index 000000000..67e62e801 --- /dev/null +++ b/vault/src/intTest/java/org/opencadc/vault/RecursiveNodePropsTest.java @@ -0,0 +1,104 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. 
+* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.util.FileUtil; +import ca.nrc.cadc.util.Log4jInit; +import java.io.File; +import java.net.URI; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.opencadc.gms.GroupURI; + +/** + * Test the async endpoint. + * + * @author pdowler + */ +public class RecursiveNodePropsTest extends org.opencadc.conformance.vos.RecursiveNodePropsTest { + private static final Logger log = Logger.getLogger(RecursiveNodePropsTest.class); + + static { + Log4jInit.setLevel("org.opencadc.conformance.vos", Level.DEBUG); + Log4jInit.setLevel("org.opencadc.vospace", Level.INFO); + //Log4jInit.setLevel("ca.nrc.cadc.auth", Level.DEBUG); + } + + private static File ADMIN_CERT = FileUtil.getFileFromResource("vault-test.pem", RecursiveNodePropsTest.class); + + public RecursiveNodePropsTest() { + super(URI.create("ivo://opencadc.org/vault"), ADMIN_CERT); + + File altCert = FileUtil.getFileFromResource("vault-auth-test.pem", RecursiveNodePropsTest.class); + enablePermissionTests(new GroupURI(URI.create("ivo://cadc.nrc.ca/gms?opencadc-vospace-test")), altCert); + + // vault does not check the actual groups in the permission props tests, hence they can be made up. + enablePermissionPropsTest(new GroupURI(URI.create("ivo://myauth/gms?gr1")), new GroupURI(URI.create("ivo://myauth/gms?gr2"))); + } + +} diff --git a/vault/src/intTest/java/org/opencadc/vault/TransferTest.java b/vault/src/intTest/java/org/opencadc/vault/TransferTest.java new file mode 100644 index 000000000..0fe0a6b8e --- /dev/null +++ b/vault/src/intTest/java/org/opencadc/vault/TransferTest.java @@ -0,0 +1,97 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. 
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.util.Log4jInit; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; + +/** + * + * @author pdowler + */ +public class TransferTest extends org.opencadc.conformance.vos.TransferTest { + private static final Logger log = Logger.getLogger(TransferTest.class); + + static { + Log4jInit.setLevel("org.opencadc.vault", Level.INFO); + Log4jInit.setLevel("org.opencadc.conformance.vos", Level.INFO); + Log4jInit.setLevel("org.opencadc.vospace", Level.INFO); + Log4jInit.setLevel("ca.nrc.cadc.net", Level.INFO); + } + + // these are the same as raven intTest + static String SERVER = "VAULT_TEST"; + static String DATABASE = "cadctest"; + static String SCHEMA = "inventory"; + + public TransferTest() { + super(Constants.RESOURCE_ID, Constants.ADMIN_CERT); + enableTestDataNodePermission(Constants.ALT_GROUP, Constants.ALT_CERT); + } +} diff --git a/vault/src/main/java/org/opencadc/vault/NodePersistenceImpl.java b/vault/src/main/java/org/opencadc/vault/NodePersistenceImpl.java new file mode 100644 index 000000000..bf2fbb388 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/NodePersistenceImpl.java @@ -0,0 +1,853 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. 
+* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.auth.AuthenticationUtil; +import ca.nrc.cadc.auth.HttpPrincipal; +import ca.nrc.cadc.auth.IdentityManager; +import ca.nrc.cadc.db.TransactionManager; +import ca.nrc.cadc.io.ResourceIterator; +import ca.nrc.cadc.net.TransientException; +import ca.nrc.cadc.reg.Standards; +import ca.nrc.cadc.reg.client.LocalAuthority; +import ca.nrc.cadc.util.InvalidConfigException; +import ca.nrc.cadc.util.MultiValuedProperties; +import java.io.IOException; +import java.net.URI; +import java.text.DateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.UUID; +import javax.security.auth.Subject; +import org.apache.log4j.Logger; +import org.opencadc.gms.GroupURI; +import org.opencadc.inventory.Artifact; +import org.opencadc.inventory.DeletedArtifactEvent; +import org.opencadc.inventory.Namespace; +import org.opencadc.inventory.PreauthKeyPair; +import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.db.DeletedArtifactEventDAO; +import org.opencadc.inventory.db.PreauthKeyPairDAO; +import org.opencadc.permissions.TokenTool; +import org.opencadc.vospace.ContainerNode; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.Node; +import org.opencadc.vospace.NodeNotSupportedException; +import org.opencadc.vospace.NodeProperty; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.VOSURI; +import org.opencadc.vospace.db.NodeDAO; +import org.opencadc.vospace.io.NodeWriter; +import org.opencadc.vospace.server.LocalServiceURI; +import org.opencadc.vospace.server.NodePersistence; +import org.opencadc.vospace.server.Views; +import org.opencadc.vospace.server.transfers.TransferGenerator; + +/** + * + * @author pdowler + */ +public class NodePersistenceImpl implements NodePersistence { + private static final Logger log = Logger.getLogger(NodePersistenceImpl.class); + + private 
static final Set ADMIN_PROPS = new TreeSet<>( + Arrays.asList( + VOS.PROPERTY_URI_CREATOR, + VOS.PROPERTY_URI_QUOTA + ) + ); + + private static final Set IMMUTABLE_PROPS = new TreeSet<>( + Arrays.asList( + VOS.PROPERTY_URI_AVAILABLESPACE, + VOS.PROPERTY_URI_CONTENTLENGTH, + VOS.PROPERTY_URI_CONTENTMD5, + VOS.PROPERTY_URI_CONTENTDATE, + VOS.PROPERTY_URI_CREATOR, + VOS.PROPERTY_URI_DATE, + VOS.PROPERTY_URI_QUOTA + ) + ); + + private static final Set ARTIFACT_PROPS = new TreeSet<>( + Arrays.asList( + // immutable + VOS.PROPERTY_URI_CONTENTLENGTH, + VOS.PROPERTY_URI_CONTENTMD5, + VOS.PROPERTY_URI_CONTENTDATE, + VOS.PROPERTY_URI_DATE, + // mutable + VOS.PROPERTY_URI_CONTENTENCODING, + VOS.PROPERTY_URI_TYPE + ) + ); + + private final Map nodeDaoConfig; + private final Map invDaoConfig; + private final Map kpDaoConfig; + private final boolean singlePool; + + private final ContainerNode root; + private final List allocationParents = new ArrayList<>(); + private final Namespace storageNamespace; + + private final boolean localGroupsOnly; + private final URI resourceID; + private final boolean preventNotFound; + + final String appName; // access by VaultTransferGenerator + + // possibly temporary hack so migration tool can set this to false and + // preserve lastModified timestamps on nodes + public boolean nodeOrigin = true; + + public NodePersistenceImpl(URI resourceID, String appName) { + if (resourceID == null) { + throw new IllegalArgumentException("resource ID required"); + } + this.resourceID = resourceID; + this.appName = appName; + + MultiValuedProperties config = VaultInitAction.getConfig(); + this.nodeDaoConfig = VaultInitAction.getDaoConfig(config); + this.invDaoConfig = VaultInitAction.getInvConfig(config); + this.kpDaoConfig = VaultInitAction.getKeyPairConfig(config); + this.singlePool = nodeDaoConfig.get("jndiDataSourceName").equals(invDaoConfig.get("jndiDataSourceName")); + + // root node + IdentityManager identityManager = AuthenticationUtil.getIdentityManager(); + UUID rootID = new UUID(0L, 0L); + this.root = new ContainerNode(rootID, ""); + root.owner = getRootOwner(config, identityManager); + root.ownerDisplay = identityManager.toDisplayString(root.owner); + log.info("ROOT owner: " + root.owner); + root.ownerID = identityManager.toOwner(root.owner); + root.isPublic = true; + root.inheritPermissions = false; + + // allocations + for (String ap : VaultInitAction.getAllocationParents(config)) { + if (ap.isEmpty()) { + // allocations are in root + allocationParents.add(root); + log.info("allocationParent: /"); + } else { + try { + + // simple top-level names only + ContainerNode cn = (ContainerNode) get(root, ap); + String str = ""; + if (cn == null) { + cn = new ContainerNode(ap); + cn.parent = root; + str = "created/"; + } + cn.isPublic = true; + cn.owner = root.owner; + cn.inheritPermissions = false; + put(cn); + allocationParents.add(cn); + log.info(str + "loaded allocationParent: /" + cn.getName()); + } catch (NodeNotSupportedException bug) { + throw new RuntimeException("BUG: failed to update isPublic=true on allocationParent " + ap, bug); + } + } + } + + String ns = config.getFirstPropertyValue(VaultInitAction.STORAGE_NAMESPACE_KEY); + this.storageNamespace = new Namespace(ns); + + this.localGroupsOnly = false; + + String pnf = config.getFirstPropertyValue(VaultInitAction.PREVENT_NOT_FOUND_KEY); + if (pnf != null) { + this.preventNotFound = Boolean.valueOf(pnf); + log.debug("Using consistency strategy: " + this.preventNotFound); + } else { + throw new 
IllegalStateException("invalid config: missing/invalid preventNotFound configuration"); + } + } + + private Subject getRootOwner(MultiValuedProperties mvp, IdentityManager im) { + final String owner = mvp.getFirstPropertyValue(VaultInitAction.ROOT_OWNER); + if (owner == null) { + throw new InvalidConfigException(VaultInitAction.ROOT_OWNER + " cannot be null"); + } + Subject ret = new Subject(); + ret.getPrincipals().add(new HttpPrincipal(owner)); + return im.augment(ret); + } + + @Override + public Views getViews() { + return new Views(); + } + + @Override + public TransferGenerator getTransferGenerator() { + PreauthKeyPairDAO keyDAO = new PreauthKeyPairDAO(); + keyDAO.setConfig(kpDaoConfig); + PreauthKeyPair kp = keyDAO.get(VaultInitAction.KEY_PAIR_NAME); + TokenTool tt = new TokenTool(kp.getPublicKey(), kp.getPrivateKey()); + return new VaultTransferGenerator(this, appName, getArtifactDAO(), tt, preventNotFound); + } + + private NodeDAO getDAO() { + NodeDAO instance = new NodeDAO(nodeOrigin); + instance.setConfig(nodeDaoConfig); + return instance; + } + + private ArtifactDAO getArtifactDAO() { + ArtifactDAO instance = new ArtifactDAO(true); // origin==true? + instance.setConfig(invDaoConfig); + return instance; + } + + private URI generateStorageID() { + UUID id = UUID.randomUUID(); + URI ret = URI.create(storageNamespace.getNamespace() + id.toString()); + return ret; + } + + @Override + public URI getResourceID() { + return resourceID; + } + + /** + * Get the container node that represents the root of all other nodes. + * This container node is used to navigate a path (from the root) using + * get(ContainerNode parent, String name). + * + * @return the root container node + */ + @Override + public ContainerNode getRootNode() { + return root; + } + + @Override + public boolean isAllocation(ContainerNode cn) { + if (cn.parent == null) { + return false; // root is never an allocation + } + ContainerNode p = cn.parent; + for (ContainerNode ap : allocationParents) { + if (p.getID().equals(ap.getID())) { + return true; + } + } + return false; + } + + private boolean absoluteEquals(ContainerNode c1, ContainerNode c2) { + // note: cavern does not use/preserve Node.id except for root + if (!c1.getName().equals(c2.getName())) { + return false; + } + // same name, check parents + if (c1.parent == null && c2.parent == null) { + // both root + return true; + } + if (c1.parent == null || c2.parent == null) { + // one is root + return false; + } + return absoluteEquals(c1.parent, c2.parent); + } + + @Override + public Set getAdminProps() { + return Collections.unmodifiableSet(ADMIN_PROPS); + } + + @Override + public Set getImmutableProps() { + return Collections.unmodifiableSet(IMMUTABLE_PROPS); + } + + /** + * Get a node by name. Concept: The caller uses this to navigate the path + * from the root node to the target, checking permissions and deciding what + * to do about LinkNode(s) along the way. 
+ * + * @param parent parent node, may be special root node but not null + * @param name relative name of the child node + * @return the child node or null if it does not exist + * @throws TransientException + */ + @Override + public Node get(ContainerNode parent, String name) throws TransientException { + if (parent == null || name == null) { + throw new IllegalArgumentException("args cannot be null: parent, name"); + } + NodeDAO dao = getDAO(); + Node ret = dao.get(parent, name); + if (ret == null) { + return null; + } + + // in principle we could have queried vospace.Node join inventory.Artifact above + // and avoid this query.... simplicity for now + if (ret instanceof DataNode) { + DataNode dn = (DataNode) ret; + ArtifactDAO artifactDAO = getArtifactDAO(); + Artifact a = artifactDAO.get(dn.storageID); + DateFormat df = NodeWriter.getDateFormat(); + if (a != null) { + // DataNode.bytesUsed is an optimization (cache): + // if DataNode.bytesUsed != Artifact.contentLength we update the cache + // this retains put+get consistency in a single-site deployed (with minoc) + // and may help hide some inconsistencies in child listing sizes + if (!a.getContentLength().equals(dn.bytesUsed)) { + TransactionManager txn = dao.getTransactionManager(); + try { + log.debug("starting node transaction"); + txn.startTransaction(); + log.debug("start txn: OK"); + + DataNode locked = (DataNode) dao.lock(dn); + if (locked != null) { + dn = locked; // safer than accidentally using the wrong variable + dn.bytesUsed = a.getContentLength(); + dao.put(dn); + ret = dn; + } + + log.debug("commit txn..."); + txn.commitTransaction(); + log.debug("commit txn: OK"); + if (locked == null) { + return null; // gone + } + } catch (Exception ex) { + if (txn.isOpen()) { + log.error("failed to update bytesUsed on " + dn.getID() + " aka " + dn.getName(), ex); + txn.rollbackTransaction(); + log.debug("rollback txn: OK"); + } + } finally { + if (txn.isOpen()) { + log.error("BUG - open transaction in finally"); + txn.rollbackTransaction(); + log.error("rollback txn: OK"); + } + } + } + + Date d = ret.getLastModified(); + Date cd = null; + if (ret.getLastModified().before(a.getLastModified())) { + d = a.getLastModified(); + } + if (d.before(a.getContentLastModified())) { + // probably not possible + d = a.getContentLastModified(); + } else { + cd = a.getContentLastModified(); + } + ret.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_DATE, df.format(d))); + if (cd != null) { + ret.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_CONTENTDATE, df.format(cd))); + } + + // assume MD5 + ret.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_CONTENTMD5, a.getContentChecksum().getSchemeSpecificPart())); + + if (a.contentEncoding != null) { + ret.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_CONTENTENCODING, a.contentEncoding)); + } + if (a.contentType != null) { + ret.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_TYPE, a.contentType)); + } + } + if (dn.bytesUsed == null) { + dn.bytesUsed = 0L; // no data stored + } + } + + ret.parent = parent; + IdentityManager identityManager = AuthenticationUtil.getIdentityManager(); + ret.owner = identityManager.toSubject(ret.ownerID); + ret.ownerDisplay = identityManager.toDisplayString(ret.owner); + + return ret; + } + + /** + * Get an iterator over the children of a node. 
The output can optionally be + * limited to a specific number of children and can optionally start at a + * specific child (usually the last one from a previous "batch") to resume + * listing at a known position. + * + * @param parent the container to iterate + * @param limit max number of nodes to return, may be null + * @param start first node in order to consider, may be null + * @return iterator of matching child nodes, may be empty + */ + @Override + public ResourceIterator iterator(ContainerNode parent, Integer limit, String start) { + if (parent == null) { + throw new IllegalArgumentException("arg cannot be null: parent"); + } + NodeDAO dao = getDAO(); + ResourceIterator ret = dao.iterator(parent, limit, start); + return new ChildNodeWrapper(parent, ret); + } + + // wrapper to add parent, owner, and props to child nodes + private class ChildNodeWrapper implements ResourceIterator { + + private final ContainerNode parent; + private final ResourceIterator childIter; + + private final IdentityManager identityManager = AuthenticationUtil.getIdentityManager(); + private final Map identCache = new TreeMap<>(); + + ChildNodeWrapper(ContainerNode parent, ResourceIterator childIter) { + this.parent = parent; + this.childIter = childIter; + // prime cache with caller + Subject caller = AuthenticationUtil.getCurrentSubject(); + if (caller != null) { + Object ownerID = identityManager.toOwner(caller); + if (ownerID != null) { + // HACK: NodeDAO returns ownerID as String and relies on the IM + // to convert to a number (eg) + identCache.put(ownerID.toString(), caller); + } + } + } + + @Override + public boolean hasNext() { + return childIter.hasNext(); + } + + @Override + public Node next() { + Node ret = childIter.next(); + ret.parent = parent; + + // owner + Subject s = identCache.get(ret.ownerID); + if (s == null) { + s = identityManager.toSubject(ret.ownerID); + identCache.put(ret.ownerID, s); + } + ret.owner = s; + ret.ownerDisplay = identityManager.toDisplayString(ret.owner); + + if (ret instanceof DataNode) { + DataNode dn = (DataNode) ret; + if (dn.bytesUsed == null) { + dn.bytesUsed = 0L; + } + } + return ret; + } + + @Override + public void close() throws IOException { + childIter.close(); + identCache.clear(); + } + + } + + /** + * Load additional node properties for the specified node. Note: this may not be + * necessary and may be removed. TBD. + * + * @param node + * @throws TransientException + */ + @Override + public void getProperties(Node node) throws TransientException { + // no-op + } + + /** + * Put the specified node. This can be an insert or update; to update, the argument + * node must have been retrieved from persistence so it has the right Entity.id + * value. This method may modify the Entity.metaChecksum and the Entity.lastModified + * values. 
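The iterator above is meant for batched listings: `limit` caps the batch and `start` resumes at a known child name, normally the last one returned by the previous batch. A usage sketch under those assumptions (the types are the ones used in this diff, imports omitted; if `start` is interpreted inclusively the resume child is returned again at the top of the next batch and a real caller would skip it):

```java
// Sketch only: list a large container in batches, resuming at the last child name seen.
void listAllChildren(NodePersistence np, ContainerNode parent) throws Exception {
    final int batch = 1000;            // illustrative batch size
    String start = null;               // null: begin at the first child
    int seen = batch;
    while (seen == batch) {            // a short batch means the listing is complete
        seen = 0;
        try (ResourceIterator<Node> iter = np.iterator(parent, batch, start)) {
            while (iter.hasNext()) {
                Node child = iter.next();
                start = child.getName();   // resume marker for the next batch
                seen++;
                // ... process child ...
            }
        }
    }
}
```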
+ * + * @param node the node to insert or update + * @return the possibly modified node + * @throws NodeNotSupportedException + * @throws TransientException + */ + @Override + public Node put(Node node) throws NodeNotSupportedException, TransientException { + if (node == null) { + throw new IllegalArgumentException("arg cannot be null: node"); + } + if (node.parentID == null) { + if (node.parent == null) { + throw new RuntimeException("BUG: cannot persist node without parent: " + node); + } + node.parentID = node.parent.getID(); + } + if (node.ownerID == null) { + if (node.owner == null) { + throw new RuntimeException("BUG: cannot persist node without owner: " + node); + } + IdentityManager identityManager = AuthenticationUtil.getIdentityManager(); + node.ownerID = identityManager.toOwner(node.owner); + } + + if (localGroupsOnly) { + if (!node.getReadOnlyGroup().isEmpty() || !node.getReadWriteGroup().isEmpty()) { + LocalAuthority loc = new LocalAuthority(); + try { + URI localGMS = loc.getServiceURI(Standards.GMS_SEARCH_10.toASCIIString()); + StringBuilder serr = new StringBuilder("non-local groups:"); + int len = serr.length(); + for (GroupURI g : node.getReadOnlyGroup()) { + if (!localGMS.equals(g.getServiceID())) { + serr.append(" ").append(g.getURI().toASCIIString()); + } + } + for (GroupURI g : node.getReadWriteGroup()) { + if (!localGMS.equals(g.getServiceID())) { + serr.append(" ").append(g.getURI().toASCIIString()); + } + } + String err = serr.toString(); + if (err.length() > len) { + throw new IllegalArgumentException(err); + } + } catch (NoSuchElementException ex) { + throw new RuntimeException("CONFIG: localGroupOnly policy && local GMS service not configured"); + } + } + } + + NodeProperty contentType = null; + NodeProperty contentEncoding = null; + // need to remove all artifact props from the node.getProperties() + // and use artifactDAO to set the mutable ones + Iterator i = node.getProperties().iterator(); + while (i.hasNext()) { + NodeProperty np = i.next(); + if (VOS.PROPERTY_URI_TYPE.equals(np.getKey())) { + contentType = np; + } else if (VOS.PROPERTY_URI_CONTENTENCODING.equals(np.getKey())) { + contentEncoding = np; + } + + if (ARTIFACT_PROPS.contains(np.getKey())) { + i.remove(); + } + } + + ArtifactDAO artifactDAO = null; + Artifact a = null; + if (node instanceof DataNode) { + DataNode dn = (DataNode) node; + if (dn.storageID == null) { + // new data node? 
if lastModified is assigned, this looks sketchy + if (dn.getLastModified() != null) { + throw new RuntimeException( + "BUG: attempt to put a previously stored DataNode without persistent storageID: " + + dn.getID() + " aka " + dn); + } + // concept: use a persistent storageID in the node that resolves to a file + // once someone puts the file to minoc, so Node.storageID == Artifact.uri + // but the artifact may or may not exist + dn.storageID = generateStorageID(); + } else { + if (contentType != null || contentEncoding != null) { + // update possibly required + artifactDAO = getArtifactDAO(); + a = artifactDAO.get(dn.storageID); + } else { + log.debug("no artifact props to update - skipping ArtifactDAO.get"); + } + } + } + + boolean useTxn = singlePool && a != null; // TODO + + // update node + NodeDAO dao = getDAO(); + dao.put(node); + + // update artifact after node + if (a != null) { + if (contentType == null || contentType.isMarkedForDeletion()) { + a.contentType = null; + } else { + a.contentType = contentType.getValue(); + } + if (contentEncoding == null || contentEncoding.isMarkedForDeletion()) { + a.contentEncoding = null; + } else { + a.contentEncoding = contentEncoding.getValue(); + } + artifactDAO.put(a); + + // re-add node props + if (contentType != null && !contentType.isMarkedForDeletion()) { + node.getProperties().add(contentType); + } + if (contentEncoding != null && !contentEncoding.isMarkedForDeletion()) { + node.getProperties().add(contentEncoding); + } + } + return node; + } + + @Override + public void move(Node node, ContainerNode dest, String newName) { + if (node == null || dest == null) { + throw new IllegalArgumentException("args cannot be null"); + } + if (node.parent == null || dest.parent == null) { + throw new IllegalArgumentException("args must both be persistent nodes before move"); + } + // try to detect attempt to disconnect from path to root: node is a parent of dest + ContainerNode cur = dest; + while (!cur.getID().equals(root.getID())) { + cur = cur.parent; + if (cur.getID().equals(node.getID())) { + throw new IllegalArgumentException("invalid destination for move: " + node.getID() + " -> " + dest.getID()); + } + + } + + NodeDAO dao = getDAO(); + TransactionManager txn = dao.getTransactionManager(); + try { + log.debug("starting node transaction"); + txn.startTransaction(); + log.debug("start txn: OK"); + + // lock the source node + Node locked = dao.lock(node); + if (locked != null) { + node = locked; // safer than having two vars and accidentally using the wrong one + Subject caller = AuthenticationUtil.getCurrentSubject(); + node.owner = caller; + node.ownerID = null; + node.ownerDisplay = null; + node.parent = dest; + node.parentID = null; + if (newName != null) { + node.setName(newName); + } + Node result = put(node); + log.debug("moved: " + result); + } + log.debug("commit txn..."); + txn.commitTransaction(); + log.debug("commit txn: OK"); + } catch (Exception ex) { + if (txn.isOpen()) { + log.error("failed to move " + node.getID() + " aka " + node.getName(), ex); + txn.rollbackTransaction(); + log.debug("rollback txn: OK"); + } + } finally { + if (txn.isOpen()) { + log.error("BUG - open transaction in finally"); + txn.rollbackTransaction(); + log.error("rollback txn: OK"); + } + } + } + + /** + * Delete the specified node. 
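As shown in put() above, artifact-backed properties are stripped from the node and only the two mutable ones (content type and encoding) are pushed down to the inventory Artifact when it already exists. A small sketch of what that means for a caller, using a hypothetical child name and type and assuming the node does not already carry a type property:

```java
// Sketch only: set the content type of an existing DataNode; put() applies it to the Artifact.
void setContentType(NodePersistenceImpl np, ContainerNode parent) throws Exception {
    DataNode dn = (DataNode) np.get(parent, "data.fits");   // hypothetical child name
    dn.getProperties().add(new NodeProperty(VOS.PROPERTY_URI_TYPE, "application/fits"));
    np.put(dn);   // node updated; Artifact.contentType updated because TYPE is a mutable artifact prop
}
```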
+ * + * @param node the node to delete + * @throws TransientException + */ + @Override + public void delete(Node node) throws TransientException { + if (node == null) { + throw new IllegalArgumentException("arg cannot be null: node"); + } + + Artifact a = null; + final NodeDAO dao = getDAO(); + final ArtifactDAO artifactDAO = getArtifactDAO(); + TransactionManager txn = dao.getTransactionManager(); + TransactionManager atxn = null; + try { + if (node instanceof DataNode) { + DataNode dn = (DataNode) node; + a = artifactDAO.get(dn.storageID); + } + if (a != null && !singlePool) { + atxn = artifactDAO.getTransactionManager(); + } + + log.debug("starting node transaction"); + txn.startTransaction(); + log.debug("start txn: OK"); + + Node locked = dao.lock(node); + if (locked != null) { + node = locked; // safer than having two vars and accidentally using the wrong one + URI storageID = null; + if (node instanceof ContainerNode) { + ContainerNode cn = (ContainerNode) node; + boolean empty = dao.isEmpty(cn); + if (!empty) { + log.debug("commit txn..."); + txn.commitTransaction(); + log.debug("commit txn: OK"); + throw new IllegalArgumentException("container node '" + node.getName() + "' is not empty"); + } + } else if (node instanceof DataNode) { + DataNode dn = (DataNode) node; + if (dn.bytesUsed != null) { + // artifact exists + storageID = dn.storageID; + } + } // else: LinkNode can always be deleted + + if (singlePool && a != null) { + // inventory ops inside main txn + DeletedArtifactEventDAO daeDAO = new DeletedArtifactEventDAO(artifactDAO); + DeletedArtifactEvent dae = new DeletedArtifactEvent(a.getID()); + daeDAO.put(dae); + artifactDAO.delete(a.getID()); + } + + // TODO: need DeletedNodeDAO to create DeletedNodeEvent + dao.delete(node.getID()); + } else { + log.debug("failed to lock node " + node.getID() + " - assume deleted by another process"); + } + + log.debug("commit txn..."); + txn.commitTransaction(); + log.debug("commit txn: OK"); + + if (!singlePool && a != null) { + log.debug("starting artifact transaction"); + atxn.startTransaction(); + log.debug("start txn: OK"); + + Artifact alock = artifactDAO.lock(a); + if (alock != null) { + DeletedArtifactEventDAO daeDAO = new DeletedArtifactEventDAO(artifactDAO); + DeletedArtifactEvent dae = new DeletedArtifactEvent(alock.getID()); + daeDAO.put(dae); + artifactDAO.delete(alock.getID()); + } + log.debug("commit artifact txn..."); + atxn.commitTransaction(); + atxn = null; + log.debug("commit artifact txn: OK"); + } + } catch (Exception ex) { + if (txn.isOpen()) { + log.error("failed to delete " + node.getID() + " aka " + node.getName(), ex); + txn.rollbackTransaction(); + log.debug("rollback txn: OK"); + } + if (atxn != null && atxn.isOpen()) { + log.error("failed to delete " + a.getID() + " aka " + a.getURI(), ex); + atxn.rollbackTransaction(); + log.debug("rollback artifact txn: OK"); + } + throw ex; + } finally { + if (txn.isOpen()) { + log.error("BUG - open transaction in finally"); + txn.rollbackTransaction(); + log.error("rollback txn: OK"); + } + if (atxn != null && atxn.isOpen()) { + log.error("BUG - open artifact transaction in finally"); + atxn.rollbackTransaction(); + log.error("rollback artifact txn: OK"); + } + } + } + + // needed by vault-migrate to configure a HarvestStateDAO for delete processing + public Map getNodeDaoConfig() { + return nodeDaoConfig; + } +} diff --git a/vault/src/main/java/org/opencadc/vault/ServiceAvailability.java b/vault/src/main/java/org/opencadc/vault/ServiceAvailability.java index 
53e415636..5a76fb1bc 100644 --- a/vault/src/main/java/org/opencadc/vault/ServiceAvailability.java +++ b/vault/src/main/java/org/opencadc/vault/ServiceAvailability.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2023. (c) 2023. +* (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -67,11 +67,16 @@ package org.opencadc.vault; +import ca.nrc.cadc.db.DBUtil; import ca.nrc.cadc.rest.RestAction; import ca.nrc.cadc.vosi.Availability; import ca.nrc.cadc.vosi.AvailabilityPlugin; - +import ca.nrc.cadc.vosi.avail.CheckDataSource; +import javax.naming.InitialContext; +import javax.naming.NamingException; +import javax.sql.DataSource; import org.apache.log4j.Logger; +import org.opencadc.vault.metadata.DataNodeSizeSync; /** * This class performs the work of determining if the executing artifact @@ -130,7 +135,31 @@ public Availability getStatus() { return new Availability(false, RestAction.STATE_READ_ONLY_MSG); } - //TODO add availability checks for dependent services + // check database pools + DataSource ds; + String testSQL; + CheckDataSource cds; + + ds = DBUtil.findJNDIDataSource("jdbc/nodes"); + testSQL = "select * from vospace.ModelVersion"; + cds = new CheckDataSource(ds, testSQL); + cds.check(); + + ds = DBUtil.findJNDIDataSource("jdbc/inventory"); + testSQL = "select * from inventory.Artifact limit 1"; + cds = new CheckDataSource(ds, testSQL); + cds.check(); + + ds = DBUtil.findJNDIDataSource("jdbc/inventory-iterator"); + testSQL = "select * from inventory.Artifact limit 1"; + cds = new CheckDataSource(ds, testSQL); + cds.check(); + + ds = DBUtil.findJNDIDataSource("jdbc/uws"); + testSQL = "select * from uws.Job limit 1"; + cds = new CheckDataSource(ds, testSQL); + cds.check(); + } catch (Throwable t) { // the test itself failed log.debug("failure", t); @@ -149,10 +178,13 @@ public void setState(String state) { String key = appName + RestAction.STATE_MODE_KEY; if (RestAction.STATE_OFFLINE.equalsIgnoreCase(state)) { System.setProperty(key, RestAction.STATE_OFFLINE); + setOffline(true); } else if (RestAction.STATE_READ_ONLY.equalsIgnoreCase(state)) { System.setProperty(key, RestAction.STATE_READ_ONLY); + setOffline(true); } else if (RestAction.STATE_READ_WRITE.equalsIgnoreCase(state)) { System.setProperty(key, RestAction.STATE_READ_WRITE); + setOffline(false); } else { throw new IllegalArgumentException("invalid state: " + state + " expected: " + RestAction.STATE_READ_WRITE + "|" @@ -169,5 +201,15 @@ private String getState() { } return ret; } - + + private void setOffline(boolean offline) { + String jndiKey = appName + "-" + DataNodeSizeSync.class.getName(); + try { + InitialContext initialContext = new InitialContext(); + DataNodeSizeSync async = (DataNodeSizeSync) initialContext.lookup(jndiKey); + async.setOffline(offline); + } catch (NamingException e) { + log.debug(String.format("unable to find %s - %s", jndiKey, e.getMessage())); + } + } } diff --git a/vault/src/main/java/org/opencadc/vault/VaultInitAction.java b/vault/src/main/java/org/opencadc/vault/VaultInitAction.java index 2f6a34f71..a80f04526 100644 --- a/vault/src/main/java/org/opencadc/vault/VaultInitAction.java +++ b/vault/src/main/java/org/opencadc/vault/VaultInitAction.java @@ -3,7 +3,7 @@ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE 
CANADIEN DE DONNÉES ASTRONOMIQUES ************** * -* (c) 2023. (c) 2023. +* (c) 2024. (c) 2024. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 @@ -63,38 +63,84 @@ * . * ************************************************************************ -*/ + */ package org.opencadc.vault; import ca.nrc.cadc.db.DBUtil; import ca.nrc.cadc.rest.InitAction; +import ca.nrc.cadc.rest.RestAction; +import ca.nrc.cadc.util.InvalidConfigException; import ca.nrc.cadc.util.MultiValuedProperties; import ca.nrc.cadc.util.PropertiesReader; +import ca.nrc.cadc.util.RsaSignatureGenerator; +import ca.nrc.cadc.uws.server.impl.InitDatabaseUWS; import java.net.URI; import java.net.URISyntaxException; +import java.security.KeyPair; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.TreeMap; +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.naming.NamingException; import javax.sql.DataSource; import org.apache.log4j.Logger; +import org.opencadc.inventory.Namespace; +import org.opencadc.inventory.PreauthKeyPair; +import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.db.HarvestStateDAO; +import org.opencadc.inventory.db.PreauthKeyPairDAO; +import org.opencadc.inventory.db.SQLGenerator; +import org.opencadc.inventory.db.StorageSiteDAO; +import org.opencadc.inventory.db.version.InitDatabaseSI; +import org.opencadc.inventory.transfer.StorageSiteAvailabilityCheck; +import org.opencadc.vault.metadata.DataNodeSizeSync; +import org.opencadc.vospace.db.InitDatabaseVOS; +import org.opencadc.vospace.server.NodePersistence; +import org.springframework.dao.DataIntegrityViolationException; /** * * @author pdowler */ public class VaultInitAction extends InitAction { + private static final Logger log = Logger.getLogger(VaultInitAction.class); + + static String KEY_PAIR_NAME = "vault-preauth-keys"; - static final String JNDI_DATASOURCE = "jdbc/nodes"; // context.xml - + static final String JNDI_VOS_DATASOURCE = "jdbc/nodes"; // context.xml + static final String JNDI_INV_DATASOURCE = "jdbc/inventory"; // context.xml + static final String JNDI_INV_ITER_DATASOURCE = "jdbc/inventory-iterator"; // context.xml + static final String JNDI_UWS_DATASOURCE = "jdbc/uws"; // context.xml + // config keys private static final String VAULT_KEY = "org.opencadc.vault"; static final String RESOURCE_ID_KEY = VAULT_KEY + ".resourceID"; - static final String SCHEMA_KEY = VAULT_KEY + ".nodes.schema"; - + static final String PREVENT_NOT_FOUND_KEY = VAULT_KEY + ".consistency.preventNotFound"; + static final String INVENTORY_SCHEMA_KEY = VAULT_KEY + ".inventory.schema"; + static final String VOSPACE_SCHEMA_KEY = VAULT_KEY + ".vospace.schema"; + static final String SINGLE_POOL_KEY = VAULT_KEY + ".singlePool"; + static final String ALLOCATION_PARENT = VAULT_KEY + ".allocationParent"; + static final String ROOT_OWNER = VAULT_KEY + ".root.owner"; + static final String STORAGE_NAMESPACE_KEY = VAULT_KEY + ".storage.namespace"; + MultiValuedProperties props; private URI resourceID; - private Map daoConfig; + private Namespace storageNamespace; + private Map vosDaoConfig; + private Map invDaoConfig; + + private String jndiNodePersistence; // store in JNDI for cadc-vos-server lib + private String jndiPreauthKeys; // store pubkey in JNDI for download via GetKeyAction + + private String jndiSiteAvailabilities; // store in JNDI to share with ProtocolsGenerator + private Thread 
availabilityCheck; + + private String jndiDataNodeSizeSync; // store in JNDI to support availability mode change + private Thread dataNodeSizeSyncThread; public VaultInitAction() { super(); @@ -103,12 +149,38 @@ public VaultInitAction() { @Override public void doInit() { initConfig(); - initDatabase(); + initDatabaseVOS(); + initDatabaseINV(); + initDatabaseUWS(); + initNodePersistence(); + initKeyPair(); + initAvailabilityCheck(); + initBackgroundWorkers(); + } + + @Override + public void doShutdown() { + try { + Context ctx = new InitialContext(); + ctx.unbind(jndiNodePersistence); + } catch (Exception oops) { + log.error("unbind failed during destroy", oops); + } + + try { + Context ctx = new InitialContext(); + ctx.unbind(jndiPreauthKeys); + } catch (Exception oops) { + log.error("unbind failed during destroy", oops); + } + + terminateAvailabilityCheck(); + terminateBackgroundWorkers(); } /** * Read config file and verify that all required entries are present. - * + * * @return MultiValuedProperties containing the application config * @throws IllegalStateException if required config items are missing */ @@ -128,10 +200,46 @@ static MultiValuedProperties getConfig() { } else { sb.append("OK"); } + + String pnf = mvp.getFirstPropertyValue(PREVENT_NOT_FOUND_KEY); + sb.append("\n\t" + PREVENT_NOT_FOUND_KEY + ": "); + if (pnf == null) { + sb.append("MISSING"); + ok = false; + } else { + sb.append("OK"); + } - String schema = mvp.getFirstPropertyValue(SCHEMA_KEY); - sb.append("\n\t").append(SCHEMA_KEY).append(": "); - if (schema == null) { + String invSchema = mvp.getFirstPropertyValue(INVENTORY_SCHEMA_KEY); + sb.append("\n\t").append(INVENTORY_SCHEMA_KEY).append(": "); + if (invSchema == null) { + sb.append("MISSING"); + ok = false; + } else { + sb.append("OK"); + } + + String vosSchema = mvp.getFirstPropertyValue(VOSPACE_SCHEMA_KEY); + sb.append("\n\t").append(VOSPACE_SCHEMA_KEY).append(": "); + if (vosSchema == null) { + sb.append("MISSING"); + ok = false; + } else { + sb.append("OK"); + } + + String sp = mvp.getFirstPropertyValue(SINGLE_POOL_KEY); + sb.append("\n\t").append(SINGLE_POOL_KEY).append(": "); + if (sp == null) { + sb.append("MISSING"); + ok = false; + } else { + sb.append("OK"); + } + + String ns = mvp.getFirstPropertyValue(STORAGE_NAMESPACE_KEY); + sb.append("\n\t").append(STORAGE_NAMESPACE_KEY).append(": "); + if (ns == null) { sb.append("MISSING"); ok = false; } else { @@ -144,40 +252,274 @@ static MultiValuedProperties getConfig() { return mvp; } + + static List getAllocationParents(MultiValuedProperties props) { + List ret = new ArrayList<>(); + for (String sap : props.getProperty(ALLOCATION_PARENT)) { + String ap = sap; + if (ap.charAt(0) == '/') { + ap = ap.substring(1); + } + if (ap.length() > 0 && ap.charAt(ap.length() - 1) == '/') { + ap = ap.substring(0, ap.length() - 1); + } + if (ap.indexOf('/') >= 0) { + throw new InvalidConfigException("invalid " + ALLOCATION_PARENT + ": " + sap + + " reason: must be a top-level container node name"); + } + // empty string means root, otherwise child of root + ret.add(ap); + } + return ret; + } + + static Map getDaoConfig(MultiValuedProperties props) { + Map ret = new TreeMap<>(); + ret.put(SQLGenerator.class.getName(), SQLGenerator.class); // not configurable right now + ret.put("jndiDataSourceName", VaultInitAction.JNDI_VOS_DATASOURCE); + ret.put("invSchema", props.getFirstPropertyValue(INVENTORY_SCHEMA_KEY)); + ret.put("genSchema", props.getFirstPropertyValue(VOSPACE_SCHEMA_KEY)); // for complete init + ret.put("vosSchema", 
props.getFirstPropertyValue(VOSPACE_SCHEMA_KEY)); + return ret; + } - static Map getDaoConfig(MultiValuedProperties props) { + static Map getInvConfig(MultiValuedProperties props) { + boolean usp = Boolean.parseBoolean(props.getFirstPropertyValue(SINGLE_POOL_KEY)); + if (usp) { + return getDaoConfig(props); + } Map ret = new TreeMap<>(); - ret.put("jndiDataSourceName", org.opencadc.vault.VaultInitAction.JNDI_DATASOURCE); - ret.put("schema", props.getFirstPropertyValue(org.opencadc.vault.VaultInitAction.SCHEMA_KEY)); + ret.put(SQLGenerator.class.getName(), SQLGenerator.class); // not configurable right now + ret.put("jndiDataSourceName", JNDI_INV_DATASOURCE); + ret.put("invSchema", props.getFirstPropertyValue(INVENTORY_SCHEMA_KEY)); + ret.put("genSchema", props.getFirstPropertyValue(INVENTORY_SCHEMA_KEY)); // for complete init return ret; } + static Map getIteratorConfig(MultiValuedProperties props) { + Map ret = new TreeMap<>(); + ret.put(SQLGenerator.class.getName(), SQLGenerator.class); // not configurable right now + ret.put("jndiDataSourceName", JNDI_INV_ITER_DATASOURCE); + ret.put("invSchema", props.getFirstPropertyValue(INVENTORY_SCHEMA_KEY)); + ret.put("genSchema", props.getFirstPropertyValue(INVENTORY_SCHEMA_KEY)); // for complete init + return ret; + } + + static Map getKeyPairConfig(MultiValuedProperties props) { + return getDaoConfig(props); + } + private void initConfig() { log.info("initConfig: START"); this.props = getConfig(); String rid = props.getFirstPropertyValue(RESOURCE_ID_KEY); - + String ns = props.getFirstPropertyValue(STORAGE_NAMESPACE_KEY); try { this.resourceID = new URI(rid); - this.daoConfig = getDaoConfig(props); + this.storageNamespace = new Namespace(ns); + this.vosDaoConfig = getDaoConfig(props); + this.invDaoConfig = getInvConfig(props); log.info("initConfig: OK"); } catch (URISyntaxException ex) { throw new IllegalStateException("invalid config: " + RESOURCE_ID_KEY + " must be a valid URI"); } } + + private void initDatabaseVOS() { + try { + String dsname = (String) vosDaoConfig.get("jndiDataSourceName"); + String schema = (String) vosDaoConfig.get("vosSchema"); + log.info("initDatabase: " + dsname + " " + schema + " START"); + DataSource ds = DBUtil.findJNDIDataSource(dsname); + InitDatabaseVOS init = new InitDatabaseVOS(ds, null, schema); + init.doInit(); + log.info("initDatabase: " + dsname + " " + schema + " OK"); + } catch (Exception ex) { + throw new IllegalStateException("check/init vospace database failed", ex); + } + } - private void initDatabase() { - log.info("initDatabase: START"); + private void initDatabaseINV() { + try { + String dsname = (String) invDaoConfig.get("jndiDataSourceName"); + String schema = (String) invDaoConfig.get("invSchema"); + log.info("initDatabase: " + dsname + " " + schema + " START"); + DataSource ds = DBUtil.findJNDIDataSource(dsname); + InitDatabaseSI init = new InitDatabaseSI(ds, null, schema); + init.doInit(); + log.info("initDatabase: " + dsname + " " + schema + " OK"); + } catch (Exception ex) { + throw new IllegalStateException("check/init inventory database failed", ex); + } + } + + private void initDatabaseUWS() { try { - DataSource ds = DBUtil.findJNDIDataSource(JNDI_DATASOURCE); - String database = (String) daoConfig.get("database"); - String schema = (String) daoConfig.get("schema"); - //VaultInitDatabase init = new VaultInitDatabase(ds, database, schema); - //init.doInit(); - log.info("initDatabase: " + JNDI_DATASOURCE + " " + schema + " OK"); + log.info("initDatabase: " + JNDI_UWS_DATASOURCE + " uws 
START"); + DataSource uws = DBUtil.findJNDIDataSource(JNDI_UWS_DATASOURCE); + InitDatabaseUWS uwsi = new InitDatabaseUWS(uws, null, "uws"); + uwsi.doInit(); + log.info("initDatabase: " + JNDI_UWS_DATASOURCE + " uws OK"); } catch (Exception ex) { - throw new IllegalStateException("check/init database failed", ex); + throw new RuntimeException("check/init uws database failed", ex); } } + private void initNodePersistence() { + log.info("initNodePersistence: START"); + jndiNodePersistence = appName + "-" + NodePersistence.class.getName(); + try { + Context ctx = new InitialContext(); + try { + ctx.unbind(jndiNodePersistence); + } catch (NamingException ignore) { + log.debug("unbind previous JNDI key (" + jndiNodePersistence + ") failed... ignoring"); + } + NodePersistence npi = new NodePersistenceImpl(resourceID, appName); + ctx.bind(jndiNodePersistence, npi); + + log.info("initNodePersistence: created JNDI key: " + jndiNodePersistence + " impl: " + npi.getClass().getName()); + } catch (Exception ex) { + log.error("Failed to create JNDI Key " + jndiNodePersistence, ex); + } + } + + private void initKeyPair() { + log.info("initKeyPair: START"); + jndiPreauthKeys = appName + "-" + PreauthKeyPair.class.getName(); + try { + PreauthKeyPairDAO dao = new PreauthKeyPairDAO(); + dao.setConfig(getKeyPairConfig(props)); + PreauthKeyPair keys = dao.get(KEY_PAIR_NAME); + if (keys == null) { + KeyPair kp = RsaSignatureGenerator.getKeyPair(4096); + keys = new PreauthKeyPair(KEY_PAIR_NAME, kp.getPublic().getEncoded(), kp.getPrivate().getEncoded()); + try { + dao.put(keys); + log.info("initKeyPair: new keys created - OK"); + + } catch (DataIntegrityViolationException oops) { + log.warn("persist new " + PreauthKeyPair.class.getSimpleName() + " failed (" + oops + ") -- probably race condition"); + keys = dao.get(KEY_PAIR_NAME); + if (keys != null) { + log.info("race condition confirmed: another instance created keys - OK"); + } else { + throw new RuntimeException("check/init " + KEY_PAIR_NAME + " failed", oops); + } + } + } else { + log.info("initKeyPair: re-use existing keys - OK"); + } + Context ctx = new InitialContext(); + try { + ctx.unbind(jndiPreauthKeys); + } catch (NamingException ignore) { + log.debug("unbind previous JNDI key (" + jndiPreauthKeys + ") failed... ignoring"); + } + ctx.bind(jndiPreauthKeys, keys); + log.info("initKeyPair: created JNDI key: " + jndiPreauthKeys); + + Object o = ctx.lookup(jndiPreauthKeys); + log.info("checking... found: " + jndiPreauthKeys + " = " + o + " in " + ctx); + } catch (Exception ex) { + throw new RuntimeException("check/init " + KEY_PAIR_NAME + " failed", ex); + } + } + + private void initAvailabilityCheck() { + StorageSiteDAO storageSiteDAO = new StorageSiteDAO(); + storageSiteDAO.setConfig(getInvConfig(props)); + + this.jndiSiteAvailabilities = appName + "-" + StorageSiteAvailabilityCheck.class.getName(); + terminateAvailabilityCheck(); + this.availabilityCheck = new Thread(new StorageSiteAvailabilityCheck(storageSiteDAO, this.jndiSiteAvailabilities)); + this.availabilityCheck.setDaemon(true); + this.availabilityCheck.start(); + } + + private void terminateAvailabilityCheck() { + if (this.availabilityCheck != null) { + try { + log.info("terminating AvailabilityCheck Thread..."); + this.availabilityCheck.interrupt(); + this.availabilityCheck.join(); + log.info("terminating AvailabilityCheck Thread... 
[OK]"); + } catch (Throwable t) { + log.info("failed to terminate AvailabilityCheck thread", t); + } finally { + this.availabilityCheck = null; + } + } + + // ugh: bind() is inside StorageSiteAvailabilityCheck but unbind() is here + try { + InitialContext initialContext = new InitialContext(); + initialContext.unbind(this.jndiSiteAvailabilities); + } catch (NamingException e) { + log.debug(String.format("unable to unbind %s - %s", this.jndiSiteAvailabilities, e.getMessage())); + } + } + + private void initBackgroundWorkers() { + try { + HarvestStateDAO hsDAO = new HarvestStateDAO(); + hsDAO.setConfig(vosDaoConfig); + + ArtifactDAO artifactDAO = new ArtifactDAO(); + Map iterprops = getIteratorConfig(props); + log.warn("iterator pool: " + iterprops.get("jndiDataSourceName")); + artifactDAO.setConfig(iterprops); + + // determine startup mode + boolean offline = false; // normal + String key = appName + RestAction.STATE_MODE_KEY; + String ret = System.getProperty(key); + if (ret != null + && (RestAction.STATE_READ_ONLY.equals(ret) || RestAction.STATE_OFFLINE.equals(ret))) { + offline = true; + } + + terminateBackgroundWorkers(); + DataNodeSizeSync async = new DataNodeSizeSync(hsDAO, artifactDAO, storageNamespace); + async.setOffline(offline); + this.dataNodeSizeSyncThread = new Thread(async); + dataNodeSizeSyncThread.setDaemon(true); + dataNodeSizeSyncThread.start(); + + // store in JNDI so availability can set offline + this.jndiDataNodeSizeSync = appName + "-" + DataNodeSizeSync.class.getName(); + InitialContext ctx = new InitialContext(); + try { + ctx.unbind(jndiDataNodeSizeSync); + } catch (NamingException ignore) { + log.debug("unbind previous JNDI key (" + jndiPreauthKeys + ") failed... ignoring"); + } + ctx.bind(jndiDataNodeSizeSync, async); + log.info("initBackgroundWorkers: created JNDI key: " + jndiDataNodeSizeSync); + } catch (Exception ex) { + throw new RuntimeException("check/init ArtifactSync failed", ex); + } + } + + private void terminateBackgroundWorkers() { + if (this.dataNodeSizeSyncThread != null) { + try { + log.info("terminating " + DataNodeSizeSync.class.getSimpleName() + " Thread..."); + this.dataNodeSizeSyncThread.interrupt(); + this.dataNodeSizeSyncThread.join(); + log.info("terminating " + DataNodeSizeSync.class.getSimpleName() + " Thread... [OK]"); + } catch (Throwable t) { + log.info("failed to terminate " + DataNodeSizeSync.class.getSimpleName() + " thread", t); + } finally { + this.dataNodeSizeSyncThread = null; + } + + try { + InitialContext initialContext = new InitialContext(); + initialContext.unbind(this.jndiDataNodeSizeSync); + } catch (NamingException e) { + log.debug(String.format("unable to unbind %s - %s", this.jndiDataNodeSizeSync, e.getMessage())); + } + } + } } diff --git a/vault/src/main/java/org/opencadc/vault/VaultTransferGenerator.java b/vault/src/main/java/org/opencadc/vault/VaultTransferGenerator.java new file mode 100644 index 000000000..3b0884887 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/VaultTransferGenerator.java @@ -0,0 +1,213 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2022. (c) 2022. 
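VaultInitAction above expects four connection pools in the servlet context (jdbc/nodes, jdbc/inventory, jdbc/inventory-iterator, jdbc/uws) and wires them to the configured schemas. A sketch of the corresponding vault configuration, with illustrative values only (the key names are the constants defined above):

```
org.opencadc.vault.resourceID = ivo://example.org/vault
org.opencadc.vault.root.owner = vaultadmin
org.opencadc.vault.allocationParent = /home/
org.opencadc.vault.storage.namespace = cadc:vault/
org.opencadc.vault.consistency.preventNotFound = true
org.opencadc.vault.inventory.schema = inventory
org.opencadc.vault.vospace.schema = vospace
org.opencadc.vault.singlePool = true
```

When singlePool is true, getInvConfig() returns the node DAO config, so vospace and inventory content share the jdbc/nodes pool and NodePersistenceImpl can update both in one transaction (for example in delete(), where the DeletedArtifactEvent and the Artifact removal commit together with the node delete); with singlePool false the artifact cleanup runs in a separate transaction against jdbc/inventory.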
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +************************************************************************ +*/ + +package org.opencadc.vault; + +import ca.nrc.cadc.auth.AuthenticationUtil; +import ca.nrc.cadc.auth.IdentityManager; +import ca.nrc.cadc.net.ResourceNotFoundException; +import ca.nrc.cadc.uws.Parameter; +import ca.nrc.cadc.vosi.Availability; +import java.io.IOException; +import java.net.URI; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.naming.NamingException; +import javax.security.auth.Subject; +import org.apache.log4j.Logger; +import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.transfer.ProtocolsGenerator; +import org.opencadc.inventory.transfer.StorageSiteAvailabilityCheck; +import org.opencadc.inventory.transfer.StorageSiteRule; +import org.opencadc.permissions.TokenTool; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.Node; +import org.opencadc.vospace.NodeNotFoundException; +import org.opencadc.vospace.VOSURI; +import org.opencadc.vospace.server.PathResolver; +import org.opencadc.vospace.server.auth.VOSpaceAuthorizer; +import org.opencadc.vospace.server.transfers.TransferGenerator; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; + +/** + * + * @author pdowler + */ +public class VaultTransferGenerator implements TransferGenerator { + private static final Logger log = Logger.getLogger(VaultTransferGenerator.class); + + private final NodePersistenceImpl nodePersistence; + private final VOSpaceAuthorizer authorizer; + private final ArtifactDAO artifactDAO; + private final TokenTool tokenTool; + private final boolean preventNotFound; + + private final Map siteRules = new HashMap<>(); + private final Map siteAvailabilities; + + @SuppressWarnings("unchecked") + public VaultTransferGenerator(NodePersistenceImpl nodePersistence, String appName, + ArtifactDAO artifactDAO, TokenTool tokenTool, boolean preventNotFound) { + this.nodePersistence = nodePersistence; + this.authorizer = new VOSpaceAuthorizer(nodePersistence); + this.artifactDAO = artifactDAO; + this.tokenTool = tokenTool; + this.preventNotFound = preventNotFound; + + // TODO: get appname from ??? 
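+ // note: this JNDI key must match the one built in VaultInitAction.initAvailabilityCheck(), where the
+ // StorageSiteAvailabilityCheck background thread binds the site availability map it maintains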
+ String siteAvailabilitiesKey = appName + "-" + StorageSiteAvailabilityCheck.class.getName(); + log.debug("siteAvailabilitiesKey: " + siteAvailabilitiesKey); + try { + Context initContext = new InitialContext(); + this.siteAvailabilities = (Map) initContext.lookup(siteAvailabilitiesKey); + log.debug("found siteAvailabilities in JNDI: " + siteAvailabilitiesKey + " = " + siteAvailabilities); + for (Map.Entry me: siteAvailabilities.entrySet()) { + log.debug("found: " + me.getKey() + " = " + me.getValue()); + } + } catch (NamingException e) { + throw new IllegalStateException("JNDI lookup error", e); + } + } + + @Override + public List getEndpoints(VOSURI target, Transfer transfer, List additionalParams) throws Exception { + log.debug("getEndpoints: " + target); + if (target == null) { + throw new IllegalArgumentException("target is required"); + } + if (transfer == null) { + throw new IllegalArgumentException("transfer is required"); + } + List ret; + try { + PathResolver ps = new PathResolver(nodePersistence, authorizer); + Node node = ps.getNode(target.getPath(), true); + if (node == null) { + throw new NodeNotFoundException(target.getPath()); + } + + if (node instanceof DataNode) { + DataNode dn = (DataNode) node; + ret = handleDataNode(dn, target.getName(), transfer); + } else { + throw new UnsupportedOperationException("transfer: " + node.getClass().getSimpleName() + + " at " + target.getPath()); + } + } finally { + // nothing right now + } + return ret; + } + + private List handleDataNode(DataNode node, String filename, Transfer trans) + throws IOException { + log.debug("handleDataNode: " + node); + + IdentityManager im = AuthenticationUtil.getIdentityManager(); + Subject caller = AuthenticationUtil.getCurrentSubject(); + Object userObject = im.toOwner(caller); + String callingUser = (userObject == null ? null : userObject.toString()); + + ProtocolsGenerator pg = new ProtocolsGenerator(artifactDAO, siteAvailabilities, siteRules); + pg.tokenGen = tokenTool; + pg.user = callingUser; + pg.requirePreauthAnon = true; + pg.preventNotFound = preventNotFound; + + Transfer artifactTrans = new Transfer(node.storageID, trans.getDirection()); + Set protoURIs = new HashSet<>(); + // storage nodes only work with pre-auth URLs. Return those regardless of the security method + // requested by the user + for (Protocol p : trans.getProtocols()) { + log.debug("requested protocol: " + p); + if (!protoURIs.contains(p.getUri())) { + Protocol anonProto = new Protocol(p.getUri()); + artifactTrans.getProtocols().add(anonProto); + protoURIs.add(p.getUri()); + log.debug("Added anon protocol for " + p.getUri()); + } + } + + try { + List ret = pg.getProtocols(artifactTrans, filename); + log.debug("generated urls: " + ret.size()); + for (Protocol p : ret) { + log.debug(p.getEndpoint() + " using " + p.getSecurityMethod()); + } + return ret; + } catch (ResourceNotFoundException ex) { + return new ArrayList<>(); + } + } +} diff --git a/vault/src/main/java/org/opencadc/vault/files/GetAction.java b/vault/src/main/java/org/opencadc/vault/files/GetAction.java new file mode 100644 index 000000000..231cad51e --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/files/GetAction.java @@ -0,0 +1,130 @@ +/* + ************************************************************************ + ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* + ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** + * + * (c) 2024. (c) 2024. 
+ * Government of Canada Gouvernement du Canada + * National Research Council Conseil national de recherches + * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 + * All rights reserved Tous droits réservés + * + * NRC disclaims any warranties, Le CNRC dénie toute garantie + * expressed, implied, or énoncée, implicite ou légale, + * statutory, of any kind with de quelque nature que ce + * respect to the software, soit, concernant le logiciel, + * including without limitation y compris sans restriction + * any warranty of merchantability toute garantie de valeur + * or fitness for a particular marchande ou de pertinence + * purpose. NRC shall not be pour un usage particulier. + * liable in any event for any Le CNRC ne pourra en aucun cas + * damages, whether direct or être tenu responsable de tout + * indirect, special or general, dommage, direct ou indirect, + * consequential or incidental, particulier ou général, + * arising from the use of the accessoire ou fortuit, résultant + * software. Neither the name de l'utilisation du logiciel. Ni + * of the National Research le nom du Conseil National de + * Council of Canada nor the Recherches du Canada ni les noms + * names of its contributors may de ses participants ne peuvent + * be used to endorse or promote être utilisés pour approuver ou + * products derived from this promouvoir les produits dérivés + * software without specific prior de ce logiciel sans autorisation + * written permission. préalable et particulière + * par écrit. + * + * This file is part of the Ce fichier fait partie du projet + * OpenCADC project. OpenCADC. + * + * OpenCADC is free software: OpenCADC est un logiciel libre ; + * you can redistribute it and/or vous pouvez le redistribuer ou le + * modify it under the terms of modifier suivant les termes de + * the GNU Affero General Public la “GNU Affero General Public + * License as published by the License” telle que publiée + * Free Software Foundation, par la Free Software Foundation + * either version 3 of the : soit la version 3 de cette + * License, or (at your option) licence, soit (à votre gré) + * any later version. toute version ultérieure. + * + * OpenCADC is distributed in the OpenCADC est distribué + * hope that it will be useful, dans l’espoir qu’il vous + * but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE + * without even the implied GARANTIE : sans même la garantie + * warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ + * or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF + * PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence + * General Public License for Générale Publique GNU Affero + * more details. pour plus de détails. + * + * You should have received Vous devriez avoir reçu une + * a copy of the GNU Affero copie de la Licence Générale + * General Public License along Publique GNU Affero avec + * with OpenCADC. If not, see OpenCADC ; si ce n’est + * . pas le cas, consultez : + * . 
+ * + ************************************************************************ + */ + +package org.opencadc.vault.files; + +import ca.nrc.cadc.auth.AuthenticationUtil; +import ca.nrc.cadc.net.TransientException; +import java.net.HttpURLConnection; +import java.util.List; +import javax.security.auth.Subject; +import org.apache.log4j.Logger; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.VOSURI; +import org.opencadc.vospace.server.NodeFault; +import org.opencadc.vospace.server.Utils; +import org.opencadc.vospace.server.transfers.TransferGenerator; +import org.opencadc.vospace.transfer.Direction; +import org.opencadc.vospace.transfer.Protocol; +import org.opencadc.vospace.transfer.Transfer; + +/** + * Class to redirect to a storage URL from which the content of a DataNode can be downloaded + * @author adriand + */ +public class GetAction extends HeadAction { + protected static Logger log = Logger.getLogger(GetAction.class); + + public GetAction() { + super(); + } + + @Override + public void doAction() throws Exception { + DataNode node = resolveAndSetMetadata(); + + Subject caller = AuthenticationUtil.getCurrentSubject(); + if (!voSpaceAuthorizer.hasSingleNodeReadPermission(node, caller)) { + // TODO: should output requested vos URI here + throw NodeFault.PermissionDenied.getStatus(syncInput.getPath()); + } + + if (node.bytesUsed == null || node.bytesUsed == 0L) { + // empty file + syncOutput.setCode(HttpURLConnection.HTTP_NO_CONTENT); + return; + } + + VOSURI targetURI = localServiceURI.getURI(node); + Transfer pullTransfer = new Transfer(targetURI.getURI(), Direction.pullFromVoSpace); + pullTransfer.version = VOS.VOSPACE_21; + pullTransfer.getProtocols().add(new Protocol(VOS.PROTOCOL_HTTPS_GET)); // anon, preauth + + TransferGenerator tg = nodePersistence.getTransferGenerator(); + List protos = tg.getEndpoints(targetURI, pullTransfer, null); + if (protos.isEmpty()) { + throw new TransientException("No location found for file " + Utils.getPath(node)); + } + Protocol proto = protos.get(0); + String loc = proto.getEndpoint(); + log.debug("Location: " + loc); + syncOutput.setHeader("Location", loc); + syncOutput.setCode(HttpURLConnection.HTTP_SEE_OTHER); + } + +} diff --git a/vault/src/main/java/org/opencadc/vault/files/HeadAction.java b/vault/src/main/java/org/opencadc/vault/files/HeadAction.java new file mode 100644 index 000000000..902c4c9a6 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/files/HeadAction.java @@ -0,0 +1,176 @@ +/* + ************************************************************************ + ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* + ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** + * + * (c) 2024. (c) 2024. + * Government of Canada Gouvernement du Canada + * National Research Council Conseil national de recherches + * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 + * All rights reserved Tous droits réservés + * + * NRC disclaims any warranties, Le CNRC dénie toute garantie + * expressed, implied, or énoncée, implicite ou légale, + * statutory, of any kind with de quelque nature que ce + * respect to the software, soit, concernant le logiciel, + * including without limitation y compris sans restriction + * any warranty of merchantability toute garantie de valeur + * or fitness for a particular marchande ou de pertinence + * purpose. NRC shall not be pour un usage particulier. 
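GetAction above never streams file content itself: it authorizes the read, answers 204 for a zero-length DataNode, and otherwise redirects the caller to a pre-authorized storage URL obtained from the transfer generator. A hypothetical exchange (the request path layout and host are assumptions; the Location value is whatever VaultTransferGenerator/ProtocolsGenerator produce):

```
GET /vault/files/home/user1/data.fits

HTTP/1.1 303 See Other
Location: <pre-authorized minoc URL for the node's storageID>
```

For an empty DataNode (bytesUsed null or 0) the response is 204 No Content with no Location header.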
+ * liable in any event for any Le CNRC ne pourra en aucun cas + * damages, whether direct or être tenu responsable de tout + * indirect, special or general, dommage, direct ou indirect, + * consequential or incidental, particulier ou général, + * arising from the use of the accessoire ou fortuit, résultant + * software. Neither the name de l'utilisation du logiciel. Ni + * of the National Research le nom du Conseil National de + * Council of Canada nor the Recherches du Canada ni les noms + * names of its contributors may de ses participants ne peuvent + * be used to endorse or promote être utilisés pour approuver ou + * products derived from this promouvoir les produits dérivés + * software without specific prior de ce logiciel sans autorisation + * written permission. préalable et particulière + * par écrit. + * + * This file is part of the Ce fichier fait partie du projet + * OpenCADC project. OpenCADC. + * + * OpenCADC is free software: OpenCADC est un logiciel libre ; + * you can redistribute it and/or vous pouvez le redistribuer ou le + * modify it under the terms of modifier suivant les termes de + * the GNU Affero General Public la “GNU Affero General Public + * License as published by the License” telle que publiée + * Free Software Foundation, par la Free Software Foundation + * either version 3 of the : soit la version 3 de cette + * License, or (at your option) licence, soit (à votre gré) + * any later version. toute version ultérieure. + * + * OpenCADC is distributed in the OpenCADC est distribué + * hope that it will be useful, dans l’espoir qu’il vous + * but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE + * without even the implied GARANTIE : sans même la garantie + * warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ + * or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF + * PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence + * General Public License for Générale Publique GNU Affero + * more details. pour plus de détails. + * + * You should have received Vous devriez avoir reçu une + * a copy of the GNU Affero copie de la Licence Générale + * General Public License along Publique GNU Affero avec + * with OpenCADC. If not, see OpenCADC ; si ce n’est + * . pas le cas, consultez : + * . 
+ * + ************************************************************************ + */ + +package org.opencadc.vault.files; + +import ca.nrc.cadc.net.ResourceNotFoundException; +import ca.nrc.cadc.rest.InlineContentHandler; +import ca.nrc.cadc.rest.RestAction; +import ca.nrc.cadc.rest.Version; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Date; +import javax.naming.Context; +import javax.naming.InitialContext; +import org.apache.log4j.Logger; +import org.opencadc.vospace.DataNode; +import org.opencadc.vospace.Node; +import org.opencadc.vospace.VOS; +import org.opencadc.vospace.VOSURI; +import org.opencadc.vospace.io.NodeWriter; +import org.opencadc.vospace.server.LocalServiceURI; +import org.opencadc.vospace.server.NodePersistence; +import org.opencadc.vospace.server.PathResolver; +import org.opencadc.vospace.server.Utils; +import org.opencadc.vospace.server.auth.VOSpaceAuthorizer; + +/** + * Class to handle a HEAD request to a DataNode + * @author adriand + */ +public class HeadAction extends RestAction { + protected static Logger log = Logger.getLogger(HeadAction.class); + + protected VOSpaceAuthorizer voSpaceAuthorizer; + protected NodePersistence nodePersistence; + protected LocalServiceURI localServiceURI; + + public HeadAction() { + super(); + } + + @Override + protected final InlineContentHandler getInlineContentHandler() { + return null; + } + + @Override + protected String getServerImpl() { + // no null version checking because fail to build correctly can't get past basic testing + Version v = getVersionFromResource(); + return "storage-inventory/vault-" + v.getMajorMinor(); + } + + @Override + public void initAction() throws Exception { + String jndiNodePersistence = super.appName + "-" + NodePersistence.class.getName(); + try { + Context ctx = new InitialContext(); + this.nodePersistence = ((NodePersistence) ctx.lookup(jndiNodePersistence)); + this.voSpaceAuthorizer = new VOSpaceAuthorizer(nodePersistence); + localServiceURI = new LocalServiceURI(nodePersistence.getResourceID()); + } catch (Exception oops) { + throw new RuntimeException("BUG: NodePersistence implementation not found with JNDI key " + jndiNodePersistence, oops); + } + + checkReadable(); + } + + @Override + public void doAction() throws Exception { + resolveAndSetMetadata(); + } + + DataNode resolveAndSetMetadata() throws Exception { + PathResolver pathResolver = new PathResolver(nodePersistence, voSpaceAuthorizer); + String filePath = syncInput.getPath(); + Node node = pathResolver.getNode(filePath, true); + + if (node == null) { + throw new ResourceNotFoundException("Target not found: " + filePath); + } + + if (!(node instanceof DataNode)) { + throw new IllegalArgumentException("Resolved target is not a data node: " + Utils.getPath(node)); + } + + log.debug("node path resolved: " + node.getName() + " type: " + node.getClass().getName()); + + DataNode dn = (DataNode) node; + syncOutput.setHeader("Content-Length", dn.bytesUsed); + syncOutput.setHeader("Content-Disposition", "inline; filename=\"" + node.getName() + "\""); + syncOutput.setHeader("Content-Type", node.getPropertyValue(VOS.PROPERTY_URI_TYPE)); + syncOutput.setHeader("Content-Encoding", node.getPropertyValue(VOS.PROPERTY_URI_CONTENTENCODING)); + if (node.getPropertyValue(VOS.PROPERTY_URI_DATE) != null) { + Date lastMod = NodeWriter.getDateFormat().parse(node.getPropertyValue(VOS.PROPERTY_URI_DATE)); + syncOutput.setLastModified(lastMod); + } + + String contentMD5 = node.getPropertyValue(VOS.PROPERTY_URI_CONTENTMD5); + if 
(contentMD5 != null) { + try { + URI md5 = new URI("md5:" + contentMD5); + syncOutput.setDigest(md5); + } catch (URISyntaxException ex) { + throw new RuntimeException("BUG: invalid " + VOS.PROPERTY_URI_CONTENTMD5 + " value " + contentMD5); + } + } + syncOutput.setCode(200); + return dn; + } + +} diff --git a/vault/src/main/java/org/opencadc/vault/metadata/BackgroundLogInfo.java b/vault/src/main/java/org/opencadc/vault/metadata/BackgroundLogInfo.java new file mode 100644 index 000000000..88ac11a93 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/metadata/BackgroundLogInfo.java @@ -0,0 +1,106 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. 
+* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault.metadata; + +import ca.nrc.cadc.date.DateUtil; +import ca.nrc.cadc.log.WebServiceLogInfo; +import java.text.DateFormat; +import java.util.Date; +import org.apache.log4j.Logger; + +/** + * Log structure for background threads. + * + * @author pdowler + */ +public class BackgroundLogInfo extends WebServiceLogInfo { + private static final Logger log = Logger.getLogger(BackgroundLogInfo.class); + + public Boolean leader; + public String instance; + public String lastmodified; + public Long processed; + public Long sleep; + + + public BackgroundLogInfo(String instance) { + super.serviceName = "vault"; + this.instance = instance; + } + + public void setOperation(String op) { + super.method = op; + } + + public void setLastModified(Date ts) { + DateFormat df = DateUtil.getDateFormat(DateUtil.IVOA_DATE_FORMAT, DateUtil.UTC); + this.lastmodified = df.format(ts); + } + + +} diff --git a/vault/src/main/java/org/opencadc/vault/metadata/DataNodeSizeSync.java b/vault/src/main/java/org/opencadc/vault/metadata/DataNodeSizeSync.java new file mode 100644 index 000000000..3bce32e99 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/metadata/DataNodeSizeSync.java @@ -0,0 +1,224 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2024. (c) 2024. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. 
+* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault.metadata; + +import java.net.URI; +import java.util.Date; +import java.util.UUID; +import org.apache.log4j.Logger; +import org.opencadc.inventory.Artifact; +import org.opencadc.inventory.Namespace; +import org.opencadc.inventory.db.ArtifactDAO; +import org.opencadc.inventory.db.HarvestState; +import org.opencadc.inventory.db.HarvestStateDAO; +import org.opencadc.vospace.db.DataNodeSizeWorker; + +/** + * Main artifact-sync agent that enables incremental sync of Artifact + * metadata to Node. 
+ * + * @author pdowler + */ +public class DataNodeSizeSync implements Runnable { + private static final Logger log = Logger.getLogger(DataNodeSizeSync.class); + + private static final long ROUNDS = 6000L; // 6 sec + private static final long SHORT_SLEEP = 5 * ROUNDS; + private static final long LONG_SLEEP = 10 * ROUNDS; + private static final long EVICT_AGE = 12 * ROUNDS; + + private static final long FAIL_SLEEP = 10 * ROUNDS; // slightly smaller than evict + + private final UUID instanceID = UUID.randomUUID(); + private final HarvestStateDAO stateDAO; + private final ArtifactDAO artifactDAO; + private final Namespace artifactNamespace; + + private boolean offline = false; + + public DataNodeSizeSync(HarvestStateDAO stateDAO, ArtifactDAO artifactDAO, Namespace artifactNamespace) { + this.stateDAO = stateDAO; + this.artifactDAO = artifactDAO; + this.artifactNamespace = artifactNamespace; + + // we need continuous timestamp updates to retain leader status, so only schedule maintenance + stateDAO.setUpdateBufferCount(0); + stateDAO.setMaintCount(9999); // every 1e4 + } + + public void setOffline(boolean offline) { + this.offline = offline; + } + + @Override + public void run() { + String name = Artifact.class.getSimpleName(); + URI resourceID = URI.create("jdbc:inventory"); + try { + Thread.sleep(1 * ROUNDS); // delay startup a bit + + while (true) { + while (offline) { + log.warn("disabled: sleep=" + LONG_SLEEP); + Thread.sleep(LONG_SLEEP); + } + + log.debug("check leader " + instanceID); + HarvestState state = stateDAO.get(name, resourceID); + log.debug("check leader " + instanceID + " found: " + state); + if (state.instanceID == null) { + state.instanceID = instanceID; + stateDAO.put(state); + state = stateDAO.get(state.getID()); + log.debug("created: " + state); + } + + long t1 = System.currentTimeMillis(); + BackgroundLogInfo logInfo = new BackgroundLogInfo(instanceID.toString()); + logInfo.setOperation(DataNodeSizeWorker.class.getSimpleName()); + logInfo.setSuccess(false); + + // determine leader + boolean leader = checkLeaderStatus(state); + logInfo.leader = leader; + logInfo.setLastModified(state.curLastModified); + long sleep = LONG_SLEEP; + if (leader) { + log.debug("leader: " + state); + boolean fail = false; + log.info(logInfo.start()); + try { + DataNodeSizeWorker worker = new DataNodeSizeWorker(stateDAO, state, artifactDAO, artifactNamespace); + worker.run(); + logInfo.setLastModified(state.curLastModified); + logInfo.processed = worker.getNumArtifactsProcessed(); + } catch (Exception ex) { + log.error("unexpected worker fail", ex); + fail = true; + } + + leader = checkLeaderStatus(state); + if (!fail && leader) { + try { + stateDAO.flushBufferedState(); + } catch (Exception ex) { + log.error("unexpected HarvestState flush fail", ex); + fail = true; + } + } + + if (!fail && leader) { + try { + // update leader timestamp before sleeping + stateDAO.put(state, true); + } catch (Exception ex) { + log.error("unexpected HarvestState force update fail", ex); + fail = true; + } + } + + if (fail) { + sleep = FAIL_SLEEP; + } else { + sleep = SHORT_SLEEP; + } + logInfo.setSuccess(!fail); + logInfo.setElapsedTime(System.currentTimeMillis() - t1); + } else { + // not leader success + sleep = LONG_SLEEP; + logInfo.setSuccess(true); + } + logInfo.sleep = sleep; + log.info(logInfo.end()); + Thread.sleep(sleep); + } + } catch (InterruptedException ex) { + log.debug("interrupted - assuming shutdown", ex); + } + } + + private boolean checkLeaderStatus(HarvestState state) { + boolean leader = 
false; + if (instanceID.equals(state.instanceID)) { + stateDAO.put(state, true); + leader = true; + } else { + // see if we should perform a coup... + Date now = new Date(); + long age = now.getTime() - state.getLastModified().getTime(); + if (age > EVICT_AGE) { + log.info("EVICTING " + state.instanceID + " because age " + age + " > " + EVICT_AGE); + state.instanceID = instanceID; + stateDAO.put(state); + leader = true; + } + } + return leader; + } +} diff --git a/vault/src/main/java/org/opencadc/vault/uws/AsyncTransferManager.java b/vault/src/main/java/org/opencadc/vault/uws/AsyncTransferManager.java new file mode 100644 index 000000000..d29762761 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/uws/AsyncTransferManager.java @@ -0,0 +1,97 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2022. (c) 2022. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. 
Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault.uws; + +import ca.nrc.cadc.uws.server.JobExecutor; +import ca.nrc.cadc.uws.server.JobPersistence; +import ca.nrc.cadc.uws.server.JobUpdater; +import ca.nrc.cadc.uws.server.ThreadPoolExecutor; +import org.apache.log4j.Logger; +import org.opencadc.vospace.server.transfers.TransferRunner; + +/** + * + * @author pdowler + */ +public class AsyncTransferManager extends VaultJobManager { + private static final Logger log = Logger.getLogger(AsyncTransferManager.class); + + public AsyncTransferManager() { + super(); + JobPersistence jp = createJobPersistence(); + JobUpdater ju = (JobUpdater) jp; + JobExecutor jobExec = new ThreadPoolExecutor(ju, TransferRunner.class, 6, "vault-transfers"); + super.setJobPersistence(jp); + super.setJobExecutor(jobExec); + + // these are made up and bogus + super.setMaxDestruction(600000L); + super.setMaxExecDuration(60000L); + super.setMaxQuote(60000L); + } +} diff --git a/vault/src/main/java/org/opencadc/vault/uws/RecursiveDeleteNodeJobManager.java b/vault/src/main/java/org/opencadc/vault/uws/RecursiveDeleteNodeJobManager.java new file mode 100644 index 000000000..88cc56629 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/uws/RecursiveDeleteNodeJobManager.java @@ -0,0 +1,103 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. 
+* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +* $Revision: 4 $ +* +************************************************************************ +*/ + +package org.opencadc.vault.uws; + +import ca.nrc.cadc.uws.server.JobExecutor; +import ca.nrc.cadc.uws.server.JobPersistence; +import ca.nrc.cadc.uws.server.JobUpdater; +import ca.nrc.cadc.uws.server.ThreadPoolExecutor; +import org.apache.log4j.Logger; +import org.opencadc.vospace.server.async.RecursiveDeleteNodeRunner; + +/** + * + * @author pdowler, majorb, yeunga, adriand + */ +public class RecursiveDeleteNodeJobManager extends VaultJobManager { + private static final Logger log = Logger.getLogger(RecursiveDeleteNodeJobManager.class); + + private static final Long MAX_EXEC_DURATION = Long.valueOf(12 * 7200L); // 24 hours? + private static final Long MAX_DESTRUCTION = Long.valueOf(7 * 24 * 3600L); // 1 week + private static final Long MAX_QUOTE = Long.valueOf(12 * 7200L); // same as exec + + public RecursiveDeleteNodeJobManager() { + super(); + JobPersistence jp = createJobPersistence(); + JobUpdater ju = (JobUpdater) jp; + super.setJobPersistence(jp); + + JobExecutor jobExec = new ThreadPoolExecutor(ju, RecursiveDeleteNodeRunner.class, 3); + super.setJobExecutor(jobExec); + + super.setMaxExecDuration(MAX_EXEC_DURATION); + super.setMaxDestruction(MAX_DESTRUCTION); + super.setMaxQuote(MAX_QUOTE); + } +} diff --git a/vault/src/main/java/org/opencadc/vault/uws/RecursiveNodePropsJobManager.java b/vault/src/main/java/org/opencadc/vault/uws/RecursiveNodePropsJobManager.java new file mode 100644 index 000000000..411f8b67e --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/uws/RecursiveNodePropsJobManager.java @@ -0,0 +1,103 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. 
+* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +* $Revision: 4 $ +* +************************************************************************ +*/ + +package org.opencadc.vault.uws; + +import ca.nrc.cadc.uws.server.JobExecutor; +import ca.nrc.cadc.uws.server.JobPersistence; +import ca.nrc.cadc.uws.server.JobUpdater; +import ca.nrc.cadc.uws.server.ThreadPoolExecutor; +import org.apache.log4j.Logger; +import org.opencadc.vospace.server.async.RecursiveNodePropsRunner; + +/** + * + * @author pdowler, majorb, yeunga, adriand + */ +public class RecursiveNodePropsJobManager extends VaultJobManager { + private static final Logger log = Logger.getLogger(RecursiveNodePropsJobManager.class); + + private static final Long MAX_EXEC_DURATION = Long.valueOf(12 * 7200L); // 24 hours? + private static final Long MAX_DESTRUCTION = Long.valueOf(7 * 24 * 3600L); // 1 week + private static final Long MAX_QUOTE = Long.valueOf(12 * 7200L); // same as exec + + public RecursiveNodePropsJobManager() { + super(); + JobPersistence jp = createJobPersistence(); + JobUpdater ju = (JobUpdater) jp; + super.setJobPersistence(jp); + + JobExecutor jobExec = new ThreadPoolExecutor(ju, RecursiveNodePropsRunner.class, 3); + super.setJobExecutor(jobExec); + + super.setMaxExecDuration(MAX_EXEC_DURATION); + super.setMaxDestruction(MAX_DESTRUCTION); + super.setMaxQuote(MAX_QUOTE); + } +} diff --git a/vault/src/main/java/org/opencadc/vault/uws/SyncTransferManager.java b/vault/src/main/java/org/opencadc/vault/uws/SyncTransferManager.java new file mode 100644 index 000000000..8d5a6774a --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/uws/SyncTransferManager.java @@ -0,0 +1,97 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. 
+* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . +* +************************************************************************ +*/ + +package org.opencadc.vault.uws; + +import ca.nrc.cadc.uws.server.JobExecutor; +import ca.nrc.cadc.uws.server.JobPersistence; +import ca.nrc.cadc.uws.server.JobUpdater; +import ca.nrc.cadc.uws.server.SyncJobExecutor; +import org.apache.log4j.Logger; +import org.opencadc.vospace.server.transfers.TransferRunner; + +/** + * + * @author pdowler + */ +public class SyncTransferManager extends VaultJobManager { + private static final Logger log = Logger.getLogger(SyncTransferManager.class); + + public SyncTransferManager() { + super(); + JobPersistence jp = createJobPersistence(); + JobUpdater ju = (JobUpdater) jp; + JobExecutor jobExec = new SyncJobExecutor(ju, TransferRunner.class); + super.setJobPersistence(jp); + super.setJobExecutor(jobExec); + + // TODO: would be nice to enable a feature like destroy-on-complete instead of timed destruction + super.setMaxDestruction(60000L); + super.setMaxExecDuration(6000L); + super.setMaxQuote(60000L); + } +} diff --git a/vault/src/main/java/org/opencadc/vault/uws/VaultJobManager.java b/vault/src/main/java/org/opencadc/vault/uws/VaultJobManager.java new file mode 100644 index 000000000..139390ac8 --- /dev/null +++ b/vault/src/main/java/org/opencadc/vault/uws/VaultJobManager.java @@ -0,0 +1,97 @@ +/* +************************************************************************ +******************* CANADIAN ASTRONOMY DATA CENTRE ******************* +************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** +* +* (c) 2023. (c) 2023. +* Government of Canada Gouvernement du Canada +* National Research Council Conseil national de recherches +* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 +* All rights reserved Tous droits réservés +* +* NRC disclaims any warranties, Le CNRC dénie toute garantie +* expressed, implied, or énoncée, implicite ou légale, +* statutory, of any kind with de quelque nature que ce +* respect to the software, soit, concernant le logiciel, +* including without limitation y compris sans restriction +* any warranty of merchantability toute garantie de valeur +* or fitness for a particular marchande ou de pertinence +* purpose. 
NRC shall not be pour un usage particulier. +* liable in any event for any Le CNRC ne pourra en aucun cas +* damages, whether direct or être tenu responsable de tout +* indirect, special or general, dommage, direct ou indirect, +* consequential or incidental, particulier ou général, +* arising from the use of the accessoire ou fortuit, résultant +* software. Neither the name de l'utilisation du logiciel. Ni +* of the National Research le nom du Conseil National de +* Council of Canada nor the Recherches du Canada ni les noms +* names of its contributors may de ses participants ne peuvent +* be used to endorse or promote être utilisés pour approuver ou +* products derived from this promouvoir les produits dérivés +* software without specific prior de ce logiciel sans autorisation +* written permission. préalable et particulière +* par écrit. +* +* This file is part of the Ce fichier fait partie du projet +* OpenCADC project. OpenCADC. +* +* OpenCADC is free software: OpenCADC est un logiciel libre ; +* you can redistribute it and/or vous pouvez le redistribuer ou le +* modify it under the terms of modifier suivant les termes de +* the GNU Affero General Public la “GNU Affero General Public +* License as published by the License” telle que publiée +* Free Software Foundation, par la Free Software Foundation +* either version 3 of the : soit la version 3 de cette +* License, or (at your option) licence, soit (à votre gré) +* any later version. toute version ultérieure. +* +* OpenCADC is distributed in the OpenCADC est distribué +* hope that it will be useful, dans l’espoir qu’il vous +* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE +* without even the implied GARANTIE : sans même la garantie +* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ +* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF +* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence +* General Public License for Générale Publique GNU Affero +* more details. pour plus de détails. +* +* You should have received Vous devriez avoir reçu une +* a copy of the GNU Affero copie de la Licence Générale +* General Public License along Publique GNU Affero avec +* with OpenCADC. If not, see OpenCADC ; si ce n’est +* . pas le cas, consultez : +* . 
+* +* $Revision: 4 $ +* +************************************************************************ +*/ + +package org.opencadc.vault.uws; + +import ca.nrc.cadc.auth.AuthenticationUtil; +import ca.nrc.cadc.uws.server.JobPersistence; +import ca.nrc.cadc.uws.server.SimpleJobManager; +import ca.nrc.cadc.uws.server.impl.PostgresJobPersistence; +import org.apache.log4j.Logger; + +/** + * + * @author adriand + */ +public class VaultJobManager extends SimpleJobManager { + private static final Logger log = Logger.getLogger(VaultJobManager.class); + + protected VaultJobManager() { + super(); + } + + protected final JobPersistence createJobPersistence() { + return new PostgresJobPersistence(AuthenticationUtil.getIdentityManager()); + } + + @Override + public void terminate() throws InterruptedException { + super.terminate(); + } +} diff --git a/vault/src/main/resources/VOSpacePlugins.properties b/vault/src/main/resources/VOSpacePlugins.properties new file mode 100644 index 000000000..7d3e00421 --- /dev/null +++ b/vault/src/main/resources/VOSpacePlugins.properties @@ -0,0 +1,12 @@ +# +# Configuration information for the vospace storage interface +# + +# Define the class that implements the interface TransferGenerator +# This class will be loaded at runtime to handle transfer requests. +# ca.nrc.cadc.vos.transfers.TransferGenerator = +ca.nrc.cadc.vos.server.transfers.TransferGenerator = ca.nrc.cadc.vospace.transfers.ADTransferGenerator + +# Define an optional class that implements the interface NodePersistence +# This class will be loaded at runtime to node persistence operations. +ca.nrc.cadc.vos.server.NodePersistence = ca.nrc.cadc.vospace.VOSpaceNodePersistence \ No newline at end of file diff --git a/vault/src/main/resources/Views.properties b/vault/src/main/resources/Views.properties new file mode 100644 index 000000000..cde5e1e1b --- /dev/null +++ b/vault/src/main/resources/Views.properties @@ -0,0 +1,72 @@ +############################################################################### +# +# Views.properties +# +# Defines the views available in this instance of VOSpace. These are loaded +# upon the loading of class ca.nrc.cadc.vos.ViewFactory into the VM. +# +# The names of the views to be defined must be listed under the 'views' key +# and separated by a space. For example: +# views = ... 
+# +# For each view name listed, three settings must be defined +# uri = The URI of the view +# alias = The alias (or shortcut) name of the view +# class = The implementing class of the view +# +# Additionally, two optional settings may be defined: +# accepts = true/false, true if this services accepts the view +# provides = true/false, true if this service provides the view +# +# For example: +# .uri = ivo://cadc.nrc.ca/vospace/core#dataview +# .alias = data +# .class = ca.nrc.cadc.vos.DataView +# .accepts = false +# .provides = true +# +# Notes: +# - View classes must extend class ca.nrc.cadc.vos.AbstractView +# - There cannot be a duplicate of any aliases or URIs in any of the +# view definitions +# +############################################################################### + +#views = data rss manifest cutout header +views = data + +# data view definition +data.uri = ivo://cadc.nrc.ca/vospace/view#data +data.alias = data +data.class = org.opencadc.vospace.server.DataView +data.accepts = false +data.provides = true + +# rss view definition +rss.uri = ivo://cadc.nrc.ca/vospace/view#rss +rss.alias = rss +rss.class = ca.nrc.cadc.vos.server.RssView +rss.accepts = false +rss.provides = true + +# manifest view definition +manifest.uri = ivo://cadc.nrc.ca/vospace/view#manifest +manifest.alias = manifest +manifest.class = ca.nrc.cadc.vos.server.ManifestView +manifest.accepts = false +manifest.provides = true + +# cutout view definition +cutout.uri = ivo://cadc.nrc.ca/vospace/view#cutout +cutout.alias = cutout +cutout.class = ca.nrc.cadc.vospace.CutoutView +cutout.accepts = false +cutout.provides = true + +# header view definition +header.uri = ivo://cadc.nrc.ca/vospace/view#header +header.alias = header +header.class = ca.nrc.cadc.vospace.HeaderView +header.accepts = false +header.provides = true + diff --git a/vault/src/main/webapp/META-INF/context.xml b/vault/src/main/webapp/META-INF/context.xml index cbc72f2e0..02f5f7090 100644 --- a/vault/src/main/webapp/META-INF/context.xml +++ b/vault/src/main/webapp/META-INF/context.xml @@ -15,4 +15,43 @@ removeAbandoned="false" testOnBorrow="true" validationQuery="select 123" /> + + + + + + + + diff --git a/vault/src/main/webapp/WEB-INF/web.xml b/vault/src/main/webapp/WEB-INF/web.xml index 2d519aa70..f57bad3bf 100644 --- a/vault/src/main/webapp/WEB-INF/web.xml +++ b/vault/src/main/webapp/WEB-INF/web.xml @@ -5,8 +5,10 @@ "http://java.sun.com/j2ee/dtds/web-app_2_3.dtd"> - vault + + index.html + logControl @@ -19,10 +21,13 @@ logLevelPackages org.opencadc.vault + org.opencadc.inventory + org.opencadc.vospace ca.nrc.cadc.db ca.nrc.cadc.rest ca.nrc.cadc.util ca.nrc.cadc.vosi + ca.nrc.cadc.uws @@ -32,61 +37,265 @@ 1 + + + AvailabilityServlet + ca.nrc.cadc.vosi.AvailabilityServlet + + ca.nrc.cadc.vosi.AvailabilityPlugin + org.opencadc.vault.ServiceAvailability + + 2 + + + + + CapabilitiesServlet + ca.nrc.cadc.rest.RestServlet + + init + ca.nrc.cadc.vosi.CapInitAction + + + head + ca.nrc.cadc.vosi.CapHeadAction + + + get + ca.nrc.cadc.vosi.CapGetAction + + + input + /capabilities.xml + + 2 + + NodesServlet + ca.nrc.cadc.rest.RestServlet + + augmentSubject + true + + + init + org.opencadc.vault.VaultInitAction + + + get + org.opencadc.vospace.server.actions.GetNodeAction + + + put + org.opencadc.vospace.server.actions.CreateNodeAction + + + post + org.opencadc.vospace.server.actions.UpdateNodeAction + + + delete + org.opencadc.vospace.server.actions.DeleteNodeAction + + 3 + + + + FilesServlet + ca.nrc.cadc.rest.RestServlet + + augmentSubject + true + + + 
get + org.opencadc.vault.files.GetAction + + + head + org.opencadc.vault.files.HeadAction + + 3 + + + + PubKeyServlet ca.nrc.cadc.rest.RestServlet - init - org.opencadc.vault.VaultInitAction + augmentSubject + false - 2 + + get + org.opencadc.inventory.transfer.GetKeyAction + + 3 - + - CapabilitiesServlet - ca.nrc.cadc.rest.RestServlet + SyncTransferServlet + ca.nrc.cadc.uws.server.JobServlet - init - ca.nrc.cadc.vosi.CapInitAction + get + ca.nrc.cadc.uws.web.SyncGetAction - head - ca.nrc.cadc.vosi.CapHeadAction + post + ca.nrc.cadc.uws.web.SyncPostAction - get - ca.nrc.cadc.vosi.CapGetAction + delete + ca.nrc.cadc.uws.web.DeleteAction + + + ca.nrc.cadc.uws.web.SyncPostAction.execOnPOST + true + + + ca.nrc.cadc.uws.server.JobManager + org.opencadc.vault.uws.SyncTransferManager - input - /capabilities.xml + ca.nrc.cadc.rest.InlineContentHandler + org.opencadc.vospace.server.transfers.InlineTransferHandler 3 - + - AvailabilityServlet - ca.nrc.cadc.vosi.AvailabilityServlet + AsyncTransferServlet + ca.nrc.cadc.uws.server.JobServlet + + get + ca.nrc.cadc.uws.web.GetAction + + + post + ca.nrc.cadc.uws.web.PostAction + + + delete + ca.nrc.cadc.uws.web.DeleteAction + - ca.nrc.cadc.vosi.AvailabilityPlugin - org.opencadc.vault.ServiceAvailability + ca.nrc.cadc.uws.server.JobManager + org.opencadc.vault.uws.AsyncTransferManager - availabilityProperties - vault-availability.properties + ca.nrc.cadc.rest.InlineContentHandler + org.opencadc.vospace.server.transfers.InlineTransferHandler - 4 + 3 + + + + RecursiveDeleteNodeServlet + ca.nrc.cadc.uws.server.JobServlet + + get + ca.nrc.cadc.uws.web.GetAction + + + post + ca.nrc.cadc.uws.web.PostAction + + + delete + ca.nrc.cadc.uws.web.DeleteAction + + + ca.nrc.cadc.uws.server.JobManager + org.opencadc.vault.uws.RecursiveDeleteNodeJobManager + + 3 + + + + + RecursiveNodePropsServlet + ca.nrc.cadc.uws.server.JobServlet + + get + ca.nrc.cadc.uws.web.GetAction + + + post + ca.nrc.cadc.uws.web.PostAction + + + delete + ca.nrc.cadc.uws.web.DeleteAction + + + ca.nrc.cadc.uws.server.JobManager + org.opencadc.vault.uws.RecursiveNodePropsJobManager + + + ca.nrc.cadc.rest.InlineContentHandler + org.opencadc.vospace.server.async.InlineNodeJobHandler + + 3 + + + + + + TransferDetailsServlet + org.opencadc.vospace.server.transfers.TransferDetailsServlet + + ca.nrc.cadc.uws.server.JobManager + org.opencadc.vault.uws.SyncTransferManager + + 3 + + + + PubKeyServlet + /pubkey + + NodesServlet /nodes/* + + + FilesServlet + /files/* + + + + RecursiveDeleteNodeServlet + /async-delete/* + + + + RecursiveNodePropsServlet + /async-setprops/* + + + + AsyncTransferServlet + /transfers/* + + + SyncTransferServlet + /synctrans/* + + + + TransferDetailsServlet + /xfer/* + + AvailabilityServlet @@ -105,4 +314,5 @@ /logControl + diff --git a/vault/src/main/webapp/capabilities.xml b/vault/src/main/webapp/capabilities.xml index 0a6205003..221e35e05 100644 --- a/vault/src/main/webapp/capabilities.xml +++ b/vault/src/main/webapp/capabilities.xml @@ -6,23 +6,114 @@ - https://replace.com/vault/capabilities + https://replace.me.com/vault/capabilities - https://replace.com/vault/availability - - + https://replace.me.com/vault/availability - https://replace.com/vault/logControl + https://replace.me.com/vault/logControl + + + + + + https://replace.me.com/vault/nodes + + + + + + + + + + https://replace.me.com/vault/files + + + + + + + + + + https://replace.me.com/vault/async-delete + + + + + + + + + + https://replace.me.com/vault/async-setprops + + + + + + + + + + 
https://replace.me.com/vault/transfers + + + + + + + + + + https://replace.me.com/vault/synctrans + + + + + + + + + + + diff --git a/vault/src/main/webapp/index.html b/vault/src/main/webapp/index.html new file mode 100644 index 000000000..0fb2c056b --- /dev/null +++ b/vault/src/main/webapp/index.html @@ -0,0 +1,165 @@ + + + + + vault API + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
 
diff --git a/vault/src/main/webapp/service.yaml b/vault/src/main/webapp/service.yaml new file mode 100644 index 000000000..0ed9ed7a9 --- /dev/null +++ b/vault/src/main/webapp/service.yaml @@ -0,0 +1,518 @@ +swagger: '2.0' +info: + version: 2.1.0 + title: VOSpace + description: | + The CANFAR Vault web service, a VOSpace storage implementation. + + VOSpace is the IVOA (International Virtual Observatory Alliance) standard interface to distributed storage. This VOSpace web service is an access point for a distributed storage network. There are three main functions of the VOSpace service: + 1. Add, replace, or delete data objects in a tree data structure. + 2. Manipulate the metadata for the data objects. + 3. Obtain URIs through which the content of the data objects can be accessed. + + Please see the specification for full details: VOSpace 2.1 Specification + + Interaction with the synchronous and asynchronous data object transfer endpoints uses the job management patterns defined in the IVOA Universal Worker Service (UWS) specification: UWS 1.1 Specification +schemes: + - https +basePath: /vault +paths: + /files/{filePath}: + + get: + description: | + Get the specified file. + tags: + - Files + responses: + '200': + description: Successful response + '400': + description: If the user requested a container node (directory). + '403': + description: If the user does not have permission. + '404': + description: If the file or part of the path to the file could not be found. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + head: + description: | + Get the metadata of the specified file. + tags: + - Files + responses: + '200': + description: Successful response + '400': + description: If the user requested a container node (directory). + '403': + description: If the user does not have permission. + '404': + description: If the file or part of the path to the file could not be found. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + + /nodes/{nodePath}: + put: + description: | + Create a new node at a specified location. + tags: + - Nodes + consumes: + - text/xml + responses: + '200': + description: Successful response + '201': + description: Successful response + '403': + description: If the user does not have permission. + '404': + description: If the path to the node could not be found. + '409': + description: If the node already exists. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + parameters: + - name: Node + in: body + description: The new Node + required: true + schema: + $ref: '#/definitions/Node' + delete: + description: | + Delete a node. To delete a non-empty ContainerNode, see async-delete below. + tags: + - Nodes + responses: + '200': + description: Successful response + '204': + description: Successful response + '403': + description: If the user does not have permission. + '404': + description: If the path to the node could not be found. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + get: + description: | + Get the details for a specific Node. + tags: + - Nodes + responses: + '200': + description: Successful response + schema: + $ref: '#/definitions/Node' + '403': + description: If the user does not have permission.
+ '404': + description: If the path to the node could not be found. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + parameters: + - name: detail + in: query + description: set the node detail level + required: false + type: string + enum: + - min + - max + - properties + - name: uri + in: query + description: for container nodes, the uri of a child node of the container on which to start the list of children. + required: false + type: string + - name: limit + in: query + description: for container nodes, the number of children to return. + required: false + type: string + - name: view + in: query + description: for data nodes, a specific view + required: false + type: string + enum: + - data + post: + description: | + Set the property values for a specific Node. + tags: + - Nodes + consumes: + - text/xml + responses: + '200': + description: Successful response + '201': + description: Successful response + '403': + description: If the user does not have permission. + '404': + description: If the path to the node could not be found. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + parameters: + - name: Node + in: body + description: The updated Node + required: true + schema: + $ref: '#/definitions/Node' + parameters: + - name: nodePath + in: path + description: The path for the node + required: true + type: string + /transfers: + post: + description: | + Post a transfer request document to the asynchronous VOSpace UWS Job endpoint. This is the first step in performing any transfer for data objects: + + - uploading a data object + - downloading a data object + - moving a data object + - copying a data object + tags: + - Transferring data + consumes: + - text/xml + responses: + '200': + description: Successful response + '201': + description: Successful response + '403': + description: If the user does not have permission. + '404': + description: If the source node could not be found. + '409': + description: If the destination node already exists. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + parameters: + - name: Transfer + in: body + description: The transfer negotiation document + required: true + schema: + $ref: '#/definitions/Transfer' + /synctrans: + post: + description: | + Post a transfer request via query parameters to the synchronous VOSpace UWS Job endpoint. This is the first step in performing any transfer for data objects: + + - uploading a data object + - downloading a data object + - moving a data object + - copying a data object + tags: + - Transferring data + consumes: + - text/xml + responses: + '200': + description: Successful response + '201': + description: Successful response + '403': + description: If the user does not have permission. + '404': + description: If the source node could not be found. + '409': + description: If the destination node already exists.
+ '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + parameters: + - name: target + in: query + description: The target parameter + required: true + type: string + format: uri + - name: direction + in: query + description: The direction of the transfer + required: true + type: string + - name: protocol + in: query + description: The protocol to use for the transfer + required: true + type: string + format: uri + /async-delete: + post: + description: | + Post a recursive delete command. This is an IVOA UWS endpoint. + tags: + - Recursive operations (async) + consumes: + - None + responses: + '200': + description: Successful response + '201': + description: Successful response + '403': + description: If the user does not have permission. + '404': + description: If the source node could not be found. + '409': + description: If the destination node already exists. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + parameters: + - name: target + in: query + description: The base node (typically a container) to recursively delete + required: true + type: string + format: uri + /async-setprops: + post: + description: | + Post a recursive set properties command. This is an IVOA UWS endpoint. + tags: + - Recursive operations (async) + consumes: + - None + responses: + '200': + description: Successful response + '201': + description: Successful response + '403': + description: If the user does not have permission. + '404': + description: If the source node could not be found. + '409': + description: If the destination node already exists. + '500': + description: Internal error + '503': + description: Service busy + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + parameters: + - name: nodeURI + in: query + description: The base node (typically a container) to recursively set properties to + required: true + type: string + format: uri + /availability: + get: + tags: + - Support Interfaces + summary: VOSI Availability + description: Indicates whether the service is operable and shows the reliability of the service for extended and scheduled requests. If the query parameter 'detail=min' is used, a lightweight heartbeat test will be performed. The heartbeat test returns status 200 if the service is available. + parameters: + - name: detail + in: query + description: specifies the heartbeat test to be used to check for availability of this service; the value 'min' must be used, otherwise the full availability test will be performed + required: false + type: string + responses: + '200': + description: A VOSI availability document in XML. + schema: + $ref: '#/definitions/availability' + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + /capabilities: + get: + summary: VOSI Capabilities + tags: + - Support Interfaces + description: | + Provides the service metadata in the form of a list of Capability descriptions. Each of these descriptions is an + XML element that:
+ - states that the service provides a particular, IVOA-standard function;
+ - lists the interfaces for invoking that function;
+ - records any details of the implementation of the function that are not defined as default or constant in the standard for that function.
+ responses: + '200': + description: A VOSI Capabilities document in XML. + schema: + $ref: '#/definitions/capabilities' + '500': + description: Internal server error + '503': + description: Service too busy + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' +definitions: + Property: + type: object + required: + - uri + description: The property identifier + properties: + uri: + type: string + format: uri + View: + type: object + required: + - uri + description: The view identifier + properties: + uri: + type: string + format: uri + Protocol: + type: object + required: + - uri + description: The protocol identifier + properties: + uri: + type: string + format: uri + Node: + type: object + required: + - uri + description: The node identifier + properties: + uri: + type: string + format: uri + Transfer: + type: object + required: + - target + - direction + - protocol + description: The transfer negotiation document + properties: + target: + type: string + format: uri + direction: + type: string + protocol: + type: string + format: uri + availability: + type: object + xml: + name: availability + namespace: http://www.ivoa.net/xml/VOSIAvailability/v1.0 + prefix: vosi + properties: + available: + type: boolean + xml: + attribute: true + prefix: vosi + note: + type: string + xml: + attribute: true + prefix: vosi + capabilities: + type: array + items: + $ref: '#/definitions/capability' + xml: + namespace: http://www.ivoa.net/xml/VOSICapabilities/v1.0 + prefix: vosi + wrapped: true + capability: + type: object + properties: + standardID: + type: string + format: uri + xml: + attribute: true + interface: + type: object + properties: + accessURL: + type: string + properties: + use: + type: string + xml: + attribute: true + securityMethod: + type: string + properties: + standardID: + type: string + format: uri + xml: + attribute: true + Error: + required: + - code + - message + properties: + code: + type: integer + format: int32 + message: + type: string
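Below is a minimal client-side sketch, not part of the change set above, showing how the `/files` HEAD endpoint documented in `service.yaml` could be exercised; the headers it prints are the ones populated by `org.opencadc.vault.files.HeadAction`. The base URL reuses the `https://replace.me.com/vault` placeholder from `capabilities.xml`, the node path is hypothetical, and the `Digest` header name is assumed from `syncOutput.setDigest(...)`.

```
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

/**
 * Sketch only: issue a HEAD request to the vault /files endpoint and print the
 * metadata headers set by HeadAction. The base URL is the replace.me.com
 * placeholder from capabilities.xml and the node path is hypothetical.
 */
public class FilesHeadExample {
    public static void main(String[] args) throws Exception {
        String base = "https://replace.me.com/vault/files";
        String nodePath = "/myContainer/myFile.fits"; // hypothetical DataNode path

        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(base + nodePath))
                .method("HEAD", HttpRequest.BodyPublishers.noBody())
                .build();

        HttpResponse<Void> response = client.send(request, HttpResponse.BodyHandlers.discarding());
        System.out.println("status: " + response.statusCode());

        // headers populated by HeadAction.resolveAndSetMetadata()
        response.headers().firstValue("Content-Length").ifPresent(v -> System.out.println("Content-Length: " + v));
        response.headers().firstValue("Content-Type").ifPresent(v -> System.out.println("Content-Type: " + v));
        response.headers().firstValue("Content-Disposition").ifPresent(v -> System.out.println("Content-Disposition: " + v));
        response.headers().firstValue("Last-Modified").ifPresent(v -> System.out.println("Last-Modified: " + v));
        // header name assumed; HeadAction sets the checksum via syncOutput.setDigest(md5)
        response.headers().firstValue("Digest").ifPresent(v -> System.out.println("Digest: " + v));
    }
}
```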