From 631a9b7337f499ceadd226c99e15bd2249ecf8a3 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Fri, 26 Jan 2024 12:35:05 +0100 Subject: [PATCH 01/92] added DataInfoBase and child classes --- .../ids/v3/model/DataFileInfo.java | 57 +++++++++++ .../ids/v3/model/DataInfoBase.java | 50 ++++++++++ .../icatproject/ids/v3/model/DataSetInfo.java | 95 +++++++++++++++++++ 3 files changed, 202 insertions(+) create mode 100644 src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java create mode 100644 src/main/java/org/icatproject/ids/v3/model/DataInfoBase.java create mode 100644 src/main/java/org/icatproject/ids/v3/model/DataSetInfo.java diff --git a/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java b/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java new file mode 100644 index 00000000..73591bc4 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java @@ -0,0 +1,57 @@ +package org.icatproject.ids.v3.model; +import org.icatproject.ids.plugin.DfInfo; + +/** + * Contains information about a Datafile. 
Replaces DsInfo in v3 + * May should implement DfInfo interface + */ +public class DataFileInfo extends DataInfoBase implements Comparable, DfInfo { + + protected String createId; + protected String modId; + protected Long datasId; + + public DataFileInfo(Long id, String name, String location, String createId, String modId, Long datasId) { + super(id, name, location); + + this.createId = createId; + this.modId = modId; + this.datasId = datasId; + } + + public String getCreateId() { + return this.createId; + } + + public String getModId() { + return this.modId; + } + + public Long getDsId() { + return this.datasId; + } + + @Override + public String toString() { + return this.location; + } + + @Override + public int compareTo(DataFileInfo o) { + if (this.id > o.getId()) { + return 1; + } + if (this.id < o.getId()) { + return -1; + } + return 0; + } + + // implementing DfInfo + @Override + public Long getDfId() { return this.getId(); } + @Override + public String getDfLocation() { return this.getLocation(); } + @Override + public String getDfName() { return this.getName(); } +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/model/DataInfoBase.java b/src/main/java/org/icatproject/ids/v3/model/DataInfoBase.java new file mode 100644 index 00000000..4f458b00 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/model/DataInfoBase.java @@ -0,0 +1,50 @@ +package org.icatproject.ids.v3.model; + + +/** + * A Base class for Data objct types. 
like Datasets or Datafiles + */ +public abstract class DataInfoBase { + + protected Long id; + protected String name; + protected String location; + + protected DataInfoBase(Long id, String name, String location){ + this.name = name; + this.id = id; + this.location = location; + } + + @Override + public abstract String toString(); + + public Long getId() { + return id; + } + + public String getName() { + return name; + } + + public String getLocation() { + return location; + } + + @Override + public int hashCode() { + return (int) (this.id ^ (this.id >>> 32)); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj == null || obj.getClass() != this.getClass()) { + return false; + } + return this.id == ((DataInfoBase) obj).getId(); + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/model/DataSetInfo.java b/src/main/java/org/icatproject/ids/v3/model/DataSetInfo.java new file mode 100644 index 00000000..ca95751a --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/model/DataSetInfo.java @@ -0,0 +1,95 @@ +package org.icatproject.ids.v3.model; + +import org.icatproject.Dataset; +import org.icatproject.Facility; +import org.icatproject.Investigation; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.plugin.DsInfo; + +/** + * Contains information about a Dataset. Replaces DsInfo in v3. 
+ * May should implement DsInfo interface + */ +public class DataSetInfo extends DataInfoBase implements DsInfo { + + protected Long facilityId; + protected String facilityName; + protected Long investigationId; + protected String investigationName; + protected String visitId; + + + public DataSetInfo(Dataset ds) throws InsufficientPrivilegesException { + super(ds.getId(), ds.getName(), ds.getLocation()); + + Investigation investigation = ds.getInvestigation(); + if (investigation == null) { + throw new InsufficientPrivilegesException( + "Probably not able to read Investigation for dataset id " + ds.getId()); + } + Facility facility = investigation.getFacility(); + if (facility == null) { + throw new InsufficientPrivilegesException( + "Probably not able to read Facility for investigation id " + + investigation.getId()); + } + + this.investigationId = investigation.getId(); + this.investigationName = investigation.getName(); + this.visitId = investigation.getVisitId(); + this.facilityId = facility.getId(); + this.facilityName = facility.getName(); + } + + + public DataSetInfo(Long id, String name, String location, Long facilityId, String facilityName, Long investigationId, String investigationName, String visitId) { + super(id, name, location); + + this.facilityId = facilityId; + this.facilityName = facilityName; + this.investigationId = investigationId; + this.investigationName = investigationName; + this.visitId = visitId; + } + + @Override + public String toString() { + return this.investigationId + "/" + this.id + " (" + this.facilityName + "/" + this.investigationName + "/" + this.visitId + "/" + + this.name + ")"; + } + + public Long getFacilityId() { + return facilityId; + } + + public String getFacilityName() { + return facilityName; + } + + public Long getInvestigationId() { + return investigationId; + } + + public String getInvestigationName() { + return investigationName; + } + + public String getVisitId() { + return visitId; + } + + + // implementing 
DsInfo + + @Override + public Long getDsId() { return this.getId(); } + @Override + public String getDsName() { return this.getName(); } + @Override + public String getDsLocation() { return this.getLocation(); } + @Override + public Long getInvId() { return this.getInvestigationId(); } + @Override + public String getInvName() { return this.getInvestigationName(); } + +} \ No newline at end of file From 1797fcd42de307ef983d8d37d34779e8e6d524eb Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 6 Feb 2024 13:34:54 +0100 Subject: [PATCH 02/92] WIP: using new DataInfoBase #1 --- .../org/icatproject/ids/DataSelection.java | 22 +- .../icatproject/ids/FiniteStateMachine.java | 80 +++--- .../java/org/icatproject/ids/IdsBean.java | 267 +++++++++--------- .../icatproject/ids/IdsBeanForDataFile.java | 5 + .../icatproject/ids/IdsBeanForDataSet.java | 5 + .../java/org/icatproject/ids/LockManager.java | 7 +- .../java/org/icatproject/ids/Prepared.java | 7 +- src/main/java/org/icatproject/ids/Tidier.java | 6 +- .../ids/v3/model/DataFileInfo.java | 4 +- .../ids/v3/model/DataInfoBase.java | 4 +- .../icatproject/ids/v3/model/DataSetInfo.java | 2 +- .../icatproject/ids/PreparePackingTest.java | 23 +- 12 files changed, 228 insertions(+), 204 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/IdsBeanForDataFile.java create mode 100644 src/main/java/org/icatproject/ids/IdsBeanForDataSet.java diff --git a/src/main/java/org/icatproject/ids/DataSelection.java b/src/main/java/org/icatproject/ids/DataSelection.java index c0ed8abc..809ce605 100644 --- a/src/main/java/org/icatproject/ids/DataSelection.java +++ b/src/main/java/org/icatproject/ids/DataSelection.java @@ -9,6 +9,8 @@ import java.util.Map; import java.util.Set; +import javax.xml.crypto.Data; + import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonValue; @@ -28,6 +30,8 @@ import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; 
import org.icatproject.ids.plugin.DsInfo; +import org.icatproject.ids.v3.model.DataFileInfo; +import org.icatproject.ids.v3.model.DataSetInfo; /** * Class to convert 3 comma separated strings containing Investigation, @@ -48,8 +52,8 @@ public class DataSelection { private List invids; private List dsids; private List dfids; - private Map dsInfos; - private Set dfInfos; + private Map dsInfos; + private Set dfInfos; private Set emptyDatasets; private boolean dsWanted; private boolean dfWanted; @@ -132,12 +136,12 @@ private void resolveDatasetIds() if (dss.size() == 1) { Dataset ds = (Dataset) dss.get(0); long dsid = ds.getId(); - dsInfos.put(dsid, new DsInfoImpl(ds)); + dsInfos.put(dsid, new DataSetInfo(ds)); if (dfWanted) { Datafile df = (Datafile) icat.get(userSessionId, "Datafile", dfid); String location = IdsBean.getLocation(dfid, df.getLocation()); dfInfos.add( - new DfInfoImpl(dfid, df.getName(), location, df.getCreateId(), df.getModId(), dsid)); + new DataFileInfo(dfid, df.getName(), location, df.getCreateId(), df.getModId(), dsid)); } } else { // Next line may reveal a permissions problem @@ -148,7 +152,7 @@ private void resolveDatasetIds() for (Long dsid : dsids) { Dataset ds = (Dataset) icat.get(userSessionId, "Dataset ds INCLUDE ds.investigation.facility", dsid); - dsInfos.put(dsid, new DsInfoImpl(ds)); + dsInfos.put(dsid, new DataSetInfo(ds)); // dataset access for the user has been checked so the REST session for the // reader account can be used if the IDS setting to allow this is enabled String query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = " @@ -228,7 +232,7 @@ private void manyDss(Long invid, JsonArray result) for (JsonValue tupV : result) { JsonArray tup = (JsonArray) tupV; long dsid = tup.getJsonNumber(0).longValueExact(); - dsInfos.put(dsid, new DsInfoImpl(dsid, tup.getString(1), tup.getString(2, null), invid, invName, + dsInfos.put(dsid, new DataSetInfo(dsid, tup.getString(1), tup.getString(2, null), 
invid, invName, visitId, facilityId, facilityName)); query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = " @@ -276,7 +280,7 @@ private void manyDfs(long dsid, JsonArray result) long dfid = tup.getJsonNumber(0).longValueExact(); String location = IdsBean.getLocation(dfid, tup.getString(2, null)); dfInfos.add( - new DfInfoImpl(dfid, tup.getString(1), location, tup.getString(3), tup.getString(4), dsid)); + new DataFileInfo(dfid, tup.getString(1), location, tup.getString(3), tup.getString(4), dsid)); } } else { long half = (min + max) / 2; @@ -294,11 +298,11 @@ private void manyDfs(long dsid, JsonArray result) } } - public Map getDsInfo() { + public Map getDsInfo() { return dsInfos; } - public Set getDfInfo() { + public Set getDfInfo() { return dfInfos; } diff --git a/src/main/java/org/icatproject/ids/FiniteStateMachine.java b/src/main/java/org/icatproject/ids/FiniteStateMachine.java index 84894e33..2b631e38 100644 --- a/src/main/java/org/icatproject/ids/FiniteStateMachine.java +++ b/src/main/java/org/icatproject/ids/FiniteStateMachine.java @@ -45,6 +45,8 @@ import org.icatproject.ids.thread.DsArchiver; import org.icatproject.ids.thread.DsRestorer; import org.icatproject.ids.thread.DsWriter; +import org.icatproject.ids.v3.model.DataFileInfo; +import org.icatproject.ids.v3.model.DataSetInfo; @Singleton @DependsOn({"LockManager"}) @@ -68,16 +70,16 @@ public void run() { Map restoreLocks = new HashMap<>(); Map deleteLocks = new HashMap<>(); - Map newOps = new HashMap<>(); - final Iterator> it = deferredDfOpsQueue.entrySet().iterator(); + Map newOps = new HashMap<>(); + final Iterator> it = deferredDfOpsQueue.entrySet().iterator(); while (it.hasNext()) { - Entry opEntry = it.next(); - DfInfoImpl dfInfo = opEntry.getKey(); + Entry opEntry = it.next(); + DataFileInfo dfInfo = opEntry.getKey(); Long dsId = dfInfo.getDsId(); - DsInfo dsInfo; + DataSetInfo dsInfo; try { Dataset ds = (Dataset) reader.get("Dataset ds INCLUDE 
ds.investigation.facility", dsId); - dsInfo = new DsInfoImpl(ds); + dsInfo = new DataSetInfo(ds); } catch (Exception e) { logger.error("Could not get dsInfo {}: {}.", dsId, e.getMessage()); continue; @@ -210,11 +212,11 @@ public void run() { try { synchronized (deferredDsOpsQueue) { final long now = System.currentTimeMillis(); - Map newOps = new HashMap<>(); - final Iterator> it = deferredDsOpsQueue.entrySet().iterator(); + Map newOps = new HashMap<>(); + final Iterator> it = deferredDsOpsQueue.entrySet().iterator(); while (it.hasNext()) { - final Entry opEntry = it.next(); - final DsInfo dsInfo = opEntry.getKey(); + final Entry opEntry = it.next(); + final DataSetInfo dsInfo = opEntry.getKey(); if (!dsChanging.containsKey(dsInfo)) { final RequestedState state = opEntry.getValue(); if (state == RequestedState.WRITE_REQUESTED @@ -242,7 +244,7 @@ public void run() { try { Lock lock = lockManager.lock(dsInfo, LockType.EXCLUSIVE); it.remove(); - long dsId = dsInfo.getDsId(); + long dsId = dsInfo.getId(); logger.debug("Will process " + dsInfo + " with " + state); dsChanging.put(dsInfo, state); final Thread w = new Thread( @@ -293,13 +295,13 @@ public enum RequestedState { */ private long processOpsDelayMillis; - private Map deferredDfOpsQueue = new HashMap<>(); + private Map deferredDfOpsQueue = new HashMap<>(); - private Map deferredDsOpsQueue = new HashMap<>(); + private Map deferredDsOpsQueue = new HashMap<>(); - private Map dfChanging = new HashMap<>(); + private Map dfChanging = new HashMap<>(); - private Map dsChanging = new HashMap<>(); + private Map dsChanging = new HashMap<>(); private Path markerDir; private long processQueueIntervalMillis; @@ -328,16 +330,16 @@ private void exit() { } /** - * Find any DfInfo which may be offline + * Find any DataFileInfo which may be offline */ - public Set getDfMaybeOffline() { - Map union; + public Set getDfMaybeOffline() { + Map union; synchronized (deferredDfOpsQueue) { union = new HashMap<>(dfChanging); 
union.putAll(deferredDfOpsQueue); } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { if (entry.getValue() != RequestedState.WRITE_REQUESTED) { result.add(entry.getKey()); } @@ -346,16 +348,16 @@ public Set getDfMaybeOffline() { } /** - * Find any DfInfo which are being restored or are queued for restoration + * Find any DataFileInfo which are being restored or are queued for restoration */ - public Set getDfRestoring() { - Map union; + public Set getDfRestoring() { + Map union; synchronized (deferredDfOpsQueue) { union = new HashMap<>(dfChanging); union.putAll(deferredDfOpsQueue); } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { if (entry.getValue() == RequestedState.RESTORE_REQUESTED) { result.add(entry.getKey()); } @@ -364,16 +366,16 @@ public Set getDfRestoring() { } /** - * Find any DsInfo which may be offline + * Find any DataSetInfo which may be offline */ - public Set getDsMaybeOffline() { - Map union; + public Set getDsMaybeOffline() { + Map union; synchronized (deferredDsOpsQueue) { union = new HashMap<>(dsChanging); union.putAll(deferredDsOpsQueue); } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { if (entry.getValue() != RequestedState.WRITE_REQUESTED) { result.add(entry.getKey()); } @@ -384,14 +386,14 @@ public Set getDsMaybeOffline() { /** * Find any DsInfo which are being restored or are queued for restoration */ - public Set getDsRestoring() { - Map union; + public Set getDsRestoring() { + Map union; synchronized (deferredDsOpsQueue) { union = new HashMap<>(dsChanging); union.putAll(deferredDsOpsQueue); } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { if 
(entry.getValue() == RequestedState.RESTORE_REQUESTED) { result.add(entry.getKey()); } @@ -473,7 +475,7 @@ private void init() { } } - public void queue(DfInfoImpl dfInfo, DeferredOp deferredOp) throws InternalException { + public void queue(DataFileInfo dfInfo, DeferredOp deferredOp) throws InternalException { logger.info("Requesting " + deferredOp + " of datafile " + dfInfo); synchronized (deferredDfOpsQueue) { @@ -488,7 +490,7 @@ public void queue(DfInfoImpl dfInfo, DeferredOp deferredOp) throws InternalExcep if (state == null) { if (deferredOp == DeferredOp.WRITE) { try { - Path marker = markerDir.resolve(Long.toString(dfInfo.getDfId())); + Path marker = markerDir.resolve(Long.toString(dfInfo.getId())); Files.createFile(marker); logger.debug("Created marker " + marker); } catch (FileAlreadyExistsException e) { @@ -534,7 +536,7 @@ public void queue(DfInfoImpl dfInfo, DeferredOp deferredOp) throws InternalExcep } } - public void queue(DsInfo dsInfo, DeferredOp deferredOp) throws InternalException { + public void queue(DataSetInfo dsInfo, DeferredOp deferredOp) throws InternalException { logger.info("Requesting " + deferredOp + " of dataset " + dsInfo); synchronized (deferredDsOpsQueue) { @@ -590,9 +592,9 @@ public void removeFromChanging(DsInfo dsInfo) { } } - private void requestWrite(DsInfo dsInfo) throws InternalException { + private void requestWrite(DataSetInfo dsInfo) throws InternalException { try { - Path marker = markerDir.resolve(Long.toString(dsInfo.getDsId())); + Path marker = markerDir.resolve(Long.toString(dsInfo.getId())); Files.createFile(marker); logger.debug("Created marker " + marker); } catch (FileAlreadyExistsException e) { diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 529c79aa..c86fb556 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -79,6 +79,9 @@ import org.icatproject.ids.plugin.DsInfo; import 
org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; +import org.icatproject.ids.v3.model.DataFileInfo; +import org.icatproject.ids.v3.model.DataInfoBase; +import org.icatproject.ids.v3.model.DataSetInfo; import org.icatproject.utils.IcatSecurity; import org.icatproject.utils.ShellCommand; @@ -87,18 +90,18 @@ public class IdsBean { public class RunPrepDsCheck implements Callable { - private Collection toCheck; + private Collection toCheck; private Set emptyDatasets; - public RunPrepDsCheck(Collection toCheck, Set emptyDatasets) { + public RunPrepDsCheck(Collection toCheck, Set emptyDatasets) { this.toCheck = toCheck; this.emptyDatasets = emptyDatasets; } @Override public Void call() throws Exception { - for (DsInfo dsInfo : toCheck) { - fsm.checkFailure(dsInfo.getDsId()); + for (DataSetInfo dsInfo : toCheck) { + fsm.checkFailure(dsInfo.getId()); restoreIfOffline(dsInfo, emptyDatasets); } return null; @@ -108,16 +111,16 @@ public Void call() throws Exception { public class RunPrepDfCheck implements Callable { - private SortedSet toCheck; + private SortedSet toCheck; - public RunPrepDfCheck(SortedSet toCheck) { + public RunPrepDfCheck(SortedSet toCheck) { this.toCheck = toCheck; } @Override public Void call() throws Exception { - for (DfInfoImpl dfInfo : toCheck) { - fsm.checkFailure(dfInfo.getDfId()); + for (DataFileInfo dfInfo : toCheck) { + fsm.checkFailure(dfInfo.getId()); restoreIfOffline(dfInfo); } return null; @@ -131,15 +134,15 @@ enum CallType { public class RestoreDfTask implements Callable { - private Set dfInfos; + private Set dfInfos; - public RestoreDfTask(Set dfInfos) { + public RestoreDfTask(Set dfInfos) { this.dfInfos = dfInfos; } @Override public Void call() throws Exception { - for (DfInfoImpl dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { restoreIfOffline(dfInfo); } return null; @@ -148,17 +151,17 @@ public Void call() throws Exception { } public class RestoreDsTask implements Callable 
{ - private Collection dsInfos; + private Collection dsInfos; private Set emptyDs; - public RestoreDsTask(Collection dsInfos, Set emptyDs) { + public RestoreDsTask(Collection dsInfos, Set emptyDs) { this.dsInfos = dsInfos; this.emptyDs = emptyDs; } @Override public Void call() throws Exception { - for (DsInfo dsInfo : dsInfos) { + for (DataSetInfo dsInfo : dsInfos) { restoreIfOffline(dsInfo, emptyDs); } return null; @@ -169,15 +172,15 @@ private class SO implements StreamingOutput { private long offset; private boolean zip; - private Map dsInfos; + private Map dsInfos; private Lock lock; private boolean compress; - private Set dfInfos; + private Set dfInfos; private String ip; private long start; private Long transferId; - SO(Map dsInfos, Set dfInfos, long offset, boolean zip, boolean compress, + SO(Map dsInfos, Set dfInfos, long offset, boolean zip, boolean compress, Lock lock, Long transferId, String ip, long start) { this.offset = offset; this.zip = zip; @@ -204,7 +207,7 @@ public void write(OutputStream output) throws IOException { zos.setLevel(0); // Otherwise use default compression } - for (DfInfoImpl dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { logger.debug("Adding " + dfInfo + " to zip"); transfer = dfInfo; DsInfo dsInfo = dsInfos.get(dfInfo.getDsId()); @@ -227,7 +230,7 @@ public void write(OutputStream output) throws IOException { } zos.close(); } else { - DfInfoImpl dfInfo = dfInfos.iterator().next(); + DataFileInfo dfInfo = dfInfos.iterator().next(); transfer = dfInfo; InputStream stream = mainStorage.get(dfInfo.getDfLocation(), dfInfo.getCreateId(), dfInfo.getModId()); @@ -357,17 +360,17 @@ static String getLocationFromDigest(long id, String locationWithHash, String key } } - static void pack(OutputStream stream, boolean zip, boolean compress, Map dsInfos, - Set dfInfos, Set emptyDatasets) { + static void pack(OutputStream stream, boolean zip, boolean compress, Map dsInfos, + Set dfInfos, Set emptyDatasets) { JsonGenerator gen = 
Json.createGenerator(stream); gen.writeStartObject(); gen.write("zip", zip); gen.write("compress", compress); gen.writeStartArray("dsInfo"); - for (DsInfo dsInfo : dsInfos.values()) { + for (DataSetInfo dsInfo : dsInfos.values()) { logger.debug("dsInfo " + dsInfo); - gen.writeStartObject().write("dsId", dsInfo.getDsId()) + gen.writeStartObject().write("dsId", dsInfo.getId()) .write("dsName", dsInfo.getDsName()).write("facilityId", dsInfo.getFacilityId()) .write("facilityName", dsInfo.getFacilityName()).write("invId", dsInfo.getInvId()) @@ -382,9 +385,9 @@ static void pack(OutputStream stream, boolean zip, boolean compress, Map dsInfos = new TreeMap<>(); - SortedSet dfInfos = new TreeSet<>(); + SortedMap dsInfos = new TreeMap<>(); + SortedSet dfInfos = new TreeSet<>(); Set emptyDatasets = new HashSet<>(); for (JsonValue itemV : pd.getJsonArray("dfInfo")) { JsonObject item = (JsonObject) itemV; String dfLocation = item.isNull("dfLocation") ? null : item.getString("dfLocation"); - dfInfos.add(new DfInfoImpl(item.getJsonNumber("dfId").longValueExact(), item.getString("dfName"), + dfInfos.add(new DataFileInfo(item.getJsonNumber("dfId").longValueExact(), item.getString("dfName"), dfLocation, item.getString("createId"), item.getString("modId"), item.getJsonNumber("dsId").longValueExact())); @@ -433,7 +436,7 @@ static Prepared unpack(InputStream stream) throws InternalException { JsonObject item = (JsonObject) itemV; long dsId = item.getJsonNumber("dsId").longValueExact(); String dsLocation = item.isNull("dsLocation") ? 
null : item.getString("dsLocation"); - dsInfos.put(dsId, new DsInfoImpl(dsId, item.getString("dsName"), dsLocation, + dsInfos.put(dsId, new DataSetInfo(dsId, item.getString("dsName"), dsLocation, item.getJsonNumber("invId").longValueExact(), item.getString("invName"), item.getString("visitId"), item.getJsonNumber("facilityId").longValueExact(), item.getString("facilityName"))); } @@ -502,7 +505,7 @@ public static void validateUUID(String thing, String id) throws BadRequestExcept class PreparedStatus { public ReentrantLock lock = new ReentrantLock(); - public DfInfoImpl fromDfElement; + public DataFileInfo fromDfElement; public Future future; public Long fromDsElement; } @@ -550,15 +553,15 @@ public void archive(String sessionId, String investigationIds, String datasetIds if (storageUnit == StorageUnit.DATASET) { DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, investigationIds, datasetIds, datafileIds, Returns.DATASETS); - Map dsInfos = dataSelection.getDsInfo(); - for (DsInfo dsInfo : dsInfos.values()) { + Map dsInfos = dataSelection.getDsInfo(); + for (DataSetInfo dsInfo : dsInfos.values()) { fsm.queue(dsInfo, DeferredOp.ARCHIVE); } } else if (storageUnit == StorageUnit.DATAFILE) { DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, investigationIds, datasetIds, datafileIds, Returns.DATAFILES); - Set dfInfos = dataSelection.getDfInfo(); - for (DfInfoImpl dfInfo : dfInfos) { + Set dfInfos = dataSelection.getDfInfo(); + for (DataFileInfo dfInfo : dfInfos) { fsm.queue(dfInfo, DeferredOp.ARCHIVE); } } else { @@ -581,16 +584,16 @@ public void archive(String sessionId, String investigationIds, String datasetIds } } - private void checkDatafilesPresent(Set dfInfos) + private void checkDatafilesPresent(Set dfInfos) throws NotFoundException, InternalException { /* Check that datafiles have not been deleted before locking */ int n = 0; StringBuffer sb = new StringBuffer("SELECT COUNT(df) from Datafile df 
WHERE (df.id in ("); - for (DfInfo dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { if (n != 0) { sb.append(','); } - sb.append(dfInfo.getDfId()); + sb.append(dfInfo.getId()); if (++n == maxIdsInQuery) { try { if (((Long) reader.search(sb.append("))").toString()).get(0)).intValue() != n) { @@ -615,12 +618,12 @@ private void checkDatafilesPresent(Set dfInfos) } - private void checkOnline(Collection dsInfos, Set emptyDatasets, - Set dfInfos) + private void checkOnline(Collection dsInfos, Set emptyDatasets, + Set dfInfos) throws InternalException, DataNotOnlineException { if (storageUnit == StorageUnit.DATASET) { boolean maybeOffline = false; - for (DsInfo dsInfo : dsInfos) { + for (DataSetInfo dsInfo : dsInfos) { if (restoreIfOffline(dsInfo, emptyDatasets)) { maybeOffline = true; } @@ -631,7 +634,7 @@ private void checkOnline(Collection dsInfos, Set emptyDatasets, } } else if (storageUnit == StorageUnit.DATAFILE) { boolean maybeOffline = false; - for (DfInfoImpl dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { if (restoreIfOffline(dfInfo)) { maybeOffline = true; } @@ -662,8 +665,8 @@ public void delete(String sessionId, String investigationIds, String datasetIds, investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); // Do it - Collection dsInfos = dataSelection.getDsInfo().values(); - Set dfInfos = dataSelection.getDfInfo(); + Collection dsInfos = dataSelection.getDsInfo().values(); + Set dfInfos = dataSelection.getDfInfo(); try (Lock lock = lockManager.lock(dsInfos, LockType.EXCLUSIVE)) { if (storageUnit == StorageUnit.DATASET) { @@ -672,9 +675,9 @@ public void delete(String sessionId, String investigationIds, String datasetIds, /* Now delete from ICAT */ List dfs = new ArrayList<>(); - for (DfInfoImpl dfInfo : dataSelection.getDfInfo()) { + for (DataFileInfo dfInfo : dataSelection.getDfInfo()) { Datafile df = new Datafile(); - df.setId(dfInfo.getDfId()); + df.setId(dfInfo.getId()); dfs.add(df); } try { @@ -696,7 +699,7 @@ 
public void delete(String sessionId, String investigationIds, String datasetIds, * been removed from ICAT so will not be accessible to any * subsequent IDS calls. */ - for (DfInfoImpl dfInfo : dataSelection.getDfInfo()) { + for (DataFileInfo dfInfo : dataSelection.getDfInfo()) { String location = dfInfo.getDfLocation(); try { if ((long) reader @@ -719,7 +722,7 @@ public void delete(String sessionId, String investigationIds, String datasetIds, } if (storageUnit == StorageUnit.DATASET) { - for (DsInfo dsInfo : dsInfos) { + for (DataSetInfo dsInfo : dsInfos) { fsm.queue(dsInfo, DeferredOp.WRITE); } } @@ -771,8 +774,8 @@ public Response getData(String preparedId, String outname, final long offset, St final boolean zip = prepared.zip; final boolean compress = prepared.compress; - final Set dfInfos = prepared.dfInfos; - final Map dsInfos = prepared.dsInfos; + final Set dfInfos = prepared.dfInfos; + final Map dsInfos = prepared.dsInfos; Set emptyDatasets = prepared.emptyDatasets; Lock lock = null; @@ -853,8 +856,8 @@ public Response getData(String sessionId, String investigationIds, String datase investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); // Do it - Map dsInfos = dataSelection.getDsInfo(); - Set dfInfos = dataSelection.getDfInfo(); + Map dsInfos = dataSelection.getDsInfo(); + Set dfInfos = dataSelection.getDfInfo(); Lock lock = null; try { @@ -947,15 +950,15 @@ public String getDatafileIds(String preparedId, String ip) final boolean zip = prepared.zip; final boolean compress = prepared.compress; - final Set dfInfos = prepared.dfInfos; + final Set dfInfos = prepared.dfInfos; ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { gen.write("zip", zip); gen.write("compress", compress); gen.writeStartArray("ids"); - for (DfInfoImpl dfInfo : dfInfos) { - gen.write(dfInfo.getDfId()); + for (DataFileInfo dfInfo : dfInfos) { + gen.write(dfInfo.getId()); } 
gen.writeEnd().writeEnd().close(); } @@ -990,12 +993,12 @@ public String getDatafileIds(String sessionId, String investigationIds, String d investigationIds, datasetIds, datafileIds, Returns.DATAFILES); // Do it - Set dfInfos = dataSelection.getDfInfo(); + Set dfInfos = dataSelection.getDfInfo(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { gen.writeStartArray("ids"); - for (DfInfoImpl dfInfo : dfInfos) { - gen.write(dfInfo.getDfId()); + for (DataFileInfo dfInfo : dfInfos) { + gen.write(dfInfo.getId()); } gen.writeEnd().writeEnd().close(); } @@ -1083,7 +1086,7 @@ public long getSize(String preparedId, String ip) throw new InternalException(e.getClass() + " " + e.getMessage()); } - final Set dfInfos = prepared.dfInfos; + final Set dfInfos = prepared.dfInfos; // Note that the "fast computation for the simple case" (see the other getSize() implementation) is not // available when calling getSize() with a preparedId. 
@@ -1098,11 +1101,11 @@ public long getSize(String preparedId, String ip) StringBuilder sb = new StringBuilder(); int n = 0; - for (DfInfoImpl df : dfInfos) { + for (DataFileInfo df : dfInfos) { if (sb.length() != 0) { sb.append(','); } - sb.append(df.getDfId()); + sb.append(df.getId()); if (n++ == 500) { size += getSizeFor(sessionId, sb); sb = new StringBuilder(); @@ -1172,11 +1175,11 @@ public long getSize(String sessionId, String investigationIds, String datasetIds StringBuilder sb = new StringBuilder(); int n = 0; - for (DfInfoImpl df : dataSelection.getDfInfo()) { + for (DataFileInfo df : dataSelection.getDfInfo()) { if (sb.length() != 0) { sb.append(','); } - sb.append(df.getDfId()); + sb.append(df.getId()); if (n++ == 500) { size += getSizeFor(sessionId, sb); sb = new StringBuilder(); @@ -1273,32 +1276,32 @@ public String getStatus(String preparedId, String ip) throw new InternalException(e.getClass() + " " + e.getMessage()); } - final Set dfInfos = prepared.dfInfos; - final Map dsInfos = prepared.dsInfos; + final Set dfInfos = prepared.dfInfos; + final Map dsInfos = prepared.dsInfos; Set emptyDatasets = prepared.emptyDatasets; Status status = Status.ONLINE; if (storageUnit == StorageUnit.DATASET) { - Set restoring = fsm.getDsRestoring(); - Set maybeOffline = fsm.getDsMaybeOffline(); - for (DsInfo dsInfo : dsInfos.values()) { - fsm.checkFailure(dsInfo.getDsId()); + Set restoring = fsm.getDsRestoring(); + Set maybeOffline = fsm.getDsMaybeOffline(); + for (DataSetInfo dsInfo : dsInfos.values()) { + fsm.checkFailure(dsInfo.getId()); if (restoring.contains(dsInfo)) { status = Status.RESTORING; } else if (maybeOffline.contains(dsInfo)) { status = Status.ARCHIVED; break; - } else if (!emptyDatasets.contains(dsInfo.getDsId()) && !mainStorage.exists(dsInfo)) { + } else if (!emptyDatasets.contains(dsInfo.getId()) && !mainStorage.exists(dsInfo)) { status = Status.ARCHIVED; break; } } } else if (storageUnit == StorageUnit.DATAFILE) { - Set restoring = 
fsm.getDfRestoring(); - Set maybeOffline = fsm.getDfMaybeOffline(); - for (DfInfo dfInfo : dfInfos) { - fsm.checkFailure(dfInfo.getDfId()); + Set restoring = fsm.getDfRestoring(); + Set maybeOffline = fsm.getDfMaybeOffline(); + for (DataFileInfo dfInfo : dfInfos) { + fsm.checkFailure(dfInfo.getId()); if (restoring.contains(dfInfo)) { status = Status.RESTORING; } else if (maybeOffline.contains(dfInfo)) { @@ -1353,19 +1356,19 @@ public String getStatus(String sessionId, String investigationIds, String datase if (storageUnit == StorageUnit.DATASET) { DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, investigationIds, datasetIds, datafileIds, Returns.DATASETS); - Map dsInfos = dataSelection.getDsInfo(); + Map dsInfos = dataSelection.getDsInfo(); - Set restoring = fsm.getDsRestoring(); - Set maybeOffline = fsm.getDsMaybeOffline(); + Set restoring = fsm.getDsRestoring(); + Set maybeOffline = fsm.getDsMaybeOffline(); Set emptyDatasets = dataSelection.getEmptyDatasets(); - for (DsInfo dsInfo : dsInfos.values()) { - fsm.checkFailure(dsInfo.getDsId()); + for (DataSetInfo dsInfo : dsInfos.values()) { + fsm.checkFailure(dsInfo.getId()); if (restoring.contains(dsInfo)) { status = Status.RESTORING; } else if (maybeOffline.contains(dsInfo)) { status = Status.ARCHIVED; break; - } else if (!emptyDatasets.contains(dsInfo.getDsId()) && !mainStorage.exists(dsInfo)) { + } else if (!emptyDatasets.contains(dsInfo.getId()) && !mainStorage.exists(dsInfo)) { status = Status.ARCHIVED; break; } @@ -1373,12 +1376,12 @@ public String getStatus(String sessionId, String investigationIds, String datase } else if (storageUnit == StorageUnit.DATAFILE) { DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, investigationIds, datasetIds, datafileIds, Returns.DATAFILES); - Set dfInfos = dataSelection.getDfInfo(); + Set dfInfos = dataSelection.getDfInfo(); - Set restoring = fsm.getDfRestoring(); - Set maybeOffline = fsm.getDfMaybeOffline(); 
- for (DfInfo dfInfo : dfInfos) { - fsm.checkFailure(dfInfo.getDfId()); + Set restoring = fsm.getDfRestoring(); + Set maybeOffline = fsm.getDfMaybeOffline(); + for (DataFileInfo dfInfo : dfInfos) { + fsm.checkFailure(dfInfo.getId()); if (restoring.contains(dfInfo)) { status = Status.RESTORING; } else if (maybeOffline.contains(dfInfo)) { @@ -1525,14 +1528,14 @@ public Boolean isPrepared(String preparedId, String ip) } if (storageUnit == StorageUnit.DATASET) { - Collection toCheck = status.fromDsElement == null ? preparedJson.dsInfos.values() + Collection toCheck = status.fromDsElement == null ? preparedJson.dsInfos.values() : preparedJson.dsInfos.tailMap(status.fromDsElement).values(); logger.debug("Will check online status of {} entries", toCheck.size()); - for (DsInfo dsInfo : toCheck) { - fsm.checkFailure(dsInfo.getDsId()); + for (DataSetInfo dsInfo : toCheck) { + fsm.checkFailure(dsInfo.getId()); if (restoreIfOffline(dsInfo, preparedJson.emptyDatasets)) { prepared = false; - status.fromDsElement = dsInfo.getDsId(); + status.fromDsElement = dsInfo.getId(); toCheck = preparedJson.dsInfos.tailMap(status.fromDsElement).values(); logger.debug("Will check in background status of {} entries", toCheck.size()); status.future = threadPool.submit(new RunPrepDsCheck(toCheck, preparedJson.emptyDatasets)); @@ -1543,19 +1546,19 @@ public Boolean isPrepared(String preparedId, String ip) toCheck = status.fromDsElement == null ? Collections.emptySet() : preparedJson.dsInfos.headMap(status.fromDsElement).values(); logger.debug("Will check finally online status of {} entries", toCheck.size()); - for (DsInfo dsInfo : toCheck) { - fsm.checkFailure(dsInfo.getDsId()); + for (DataSetInfo dsInfo : toCheck) { + fsm.checkFailure(dsInfo.getId()); if (restoreIfOffline(dsInfo, preparedJson.emptyDatasets)) { prepared = false; } } } } else if (storageUnit == StorageUnit.DATAFILE) { - SortedSet toCheck = status.fromDfElement == null ? 
preparedJson.dfInfos + SortedSet toCheck = status.fromDfElement == null ? preparedJson.dfInfos : preparedJson.dfInfos.tailSet(status.fromDfElement); logger.debug("Will check online status of {} entries", toCheck.size()); - for (DfInfoImpl dfInfo : toCheck) { - fsm.checkFailure(dfInfo.getDfId()); + for (DataFileInfo dfInfo : toCheck) { + fsm.checkFailure(dfInfo.getId()); if (restoreIfOffline(dfInfo)) { prepared = false; status.fromDfElement = dfInfo; @@ -1569,8 +1572,8 @@ public Boolean isPrepared(String preparedId, String ip) toCheck = status.fromDfElement == null ? new TreeSet<>() : preparedJson.dfInfos.headSet(status.fromDfElement); logger.debug("Will check finally online status of {} entries", toCheck.size()); - for (DfInfoImpl dfInfo : toCheck) { - fsm.checkFailure(dfInfo.getDfId()); + for (DataFileInfo dfInfo : toCheck) { + fsm.checkFailure(dfInfo.getId()); if (restoreIfOffline(dfInfo)) { prepared = false; } @@ -1629,19 +1632,19 @@ public String prepareData(String sessionId, String investigationIds, String data // Do it String preparedId = UUID.randomUUID().toString(); - Map dsInfos = dataSelection.getDsInfo(); + Map dsInfos = dataSelection.getDsInfo(); Set emptyDs = dataSelection.getEmptyDatasets(); - Set dfInfos = dataSelection.getDfInfo(); + Set dfInfos = dataSelection.getDfInfo(); if (storageUnit == StorageUnit.DATASET) { - for (DsInfo dsInfo : dsInfos.values()) { - fsm.recordSuccess(dsInfo.getDsId()); + for (DataSetInfo dsInfo : dsInfos.values()) { + fsm.recordSuccess(dsInfo.getId()); } threadPool.submit(new RestoreDsTask(dsInfos.values(), emptyDs)); } else if (storageUnit == StorageUnit.DATAFILE) { - for (DfInfo dfInfo : dfInfos) { - fsm.recordSuccess(dfInfo.getDfId()); + for (DataFileInfo dfInfo : dfInfos) { + fsm.recordSuccess(dfInfo.getId()); } threadPool.submit(new RestoreDfTask(dfInfos)); } @@ -1755,16 +1758,16 @@ public Response put(InputStream body, String sessionId, String name, String data throw new InternalException(type + " " + 
e.getMessage()); } - DsInfo dsInfo = new DsInfoImpl(ds); + DataSetInfo dsInfo = new DataSetInfo(ds); try (Lock lock = lockManager.lock(dsInfo, LockType.SHARED)) { if (storageUnit == StorageUnit.DATASET) { - Set dfInfos = Collections.emptySet(); + Set dfInfos = Collections.emptySet(); Set emptyDatasets = new HashSet<>(); try { List counts = icat.search(sessionId, - "COUNT(Datafile) <-> Dataset [id=" + dsInfo.getDsId() + "]"); + "COUNT(Datafile) <-> Dataset [id=" + dsInfo.getId() + "]"); if ((Long) counts.get(0) == 0) { - emptyDatasets.add(dsInfo.getDsId()); + emptyDatasets.add(dsInfo.getId()); } } catch (IcatException_Exception e) { IcatExceptionType type = e.getFaultInfo().getType(); @@ -1776,7 +1779,7 @@ public Response put(InputStream body, String sessionId, String name, String data } throw new InternalException(type + " " + e.getMessage()); } - Set dsInfos = new HashSet<>(); + Set dsInfos = new HashSet<>(); dsInfos.add(dsInfo); checkOnline(dsInfos, emptyDatasets, dfInfos); } @@ -1820,7 +1823,7 @@ public Response put(InputStream body, String sessionId, String name, String data } catch (IcatException_Exception e) { throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); } - fsm.queue(new DfInfoImpl(dfId, name, location, df.getCreateId(), df.getModId(), dsInfo.getDsId()), + fsm.queue(new DataFileInfo(dfId, name, location, df.getCreateId(), df.getModId(), dsInfo.getId()), DeferredOp.WRITE); } @@ -1951,12 +1954,12 @@ public void reset(String preparedId, String ip) throws BadRequestException, Inte } if (storageUnit == StorageUnit.DATASET) { - for (DsInfo dsInfo : preparedJson.dsInfos.values()) { - fsm.recordSuccess(dsInfo.getDsId()); + for (DataSetInfo dsInfo : preparedJson.dsInfos.values()) { + fsm.recordSuccess(dsInfo.getId()); } } else if (storageUnit == StorageUnit.DATAFILE) { - for (DfInfoImpl dfInfo : preparedJson.dfInfos) { - fsm.recordSuccess(dfInfo.getDfId()); + for (DataFileInfo dfInfo : preparedJson.dfInfos) { + 
fsm.recordSuccess(dfInfo.getId()); } } @@ -1986,12 +1989,12 @@ public void reset(String sessionId, String investigationIds, String datasetIds, // Do it if (storageUnit == StorageUnit.DATASET) { - for (DsInfo dsInfo : dataSelection.getDsInfo().values()) { - fsm.recordSuccess(dsInfo.getDsId()); + for (DataInfoBase dsInfo : dataSelection.getDsInfo().values()) { + fsm.recordSuccess(dsInfo.getId()); } } else if (storageUnit == StorageUnit.DATAFILE) { - for (DfInfoImpl dfInfo : dataSelection.getDfInfo()) { - fsm.recordSuccess(dfInfo.getDfId()); + for (DataInfoBase dfInfo : dataSelection.getDfInfo()) { + fsm.recordSuccess(dfInfo.getId()); } } @@ -2020,7 +2023,7 @@ private void restartUnfinishedWork() throws InternalException { Dataset ds = null; try { ds = (Dataset) reader.get("Dataset ds INCLUDE ds.investigation.facility", dsid); - DsInfo dsInfo = new DsInfoImpl(ds); + DataSetInfo dsInfo = new DataSetInfo(ds); fsm.queue(dsInfo, DeferredOp.WRITE); logger.info("Queued dataset with id " + dsid + " " + dsInfo + " to be written as it was not written out previously by IDS"); @@ -2039,7 +2042,7 @@ private void restartUnfinishedWork() throws InternalException { try { df = (Datafile) reader.get("Datafile ds INCLUDE ds.dataset", dfid); String location = getLocation(df.getId(), df.getLocation()); - DfInfoImpl dfInfo = new DfInfoImpl(dfid, df.getName(), location, df.getCreateId(), + DataFileInfo dfInfo = new DataFileInfo(dfid, df.getName(), location, df.getCreateId(), df.getModId(), df.getDataset().getId()); fsm.queue(dfInfo, DeferredOp.WRITE); logger.info("Queued datafile with id " + dfid + " " + dfInfo @@ -2076,15 +2079,15 @@ public void restore(String sessionId, String investigationIds, String datasetIds if (storageUnit == StorageUnit.DATASET) { DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, investigationIds, datasetIds, datafileIds, Returns.DATASETS); - Map dsInfos = dataSelection.getDsInfo(); - for (DsInfo dsInfo : dsInfos.values()) { + Map 
dsInfos = dataSelection.getDsInfo(); + for (DataSetInfo dsInfo : dsInfos.values()) { fsm.queue(dsInfo, DeferredOp.RESTORE); } } else if (storageUnit == StorageUnit.DATAFILE) { DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, investigationIds, datasetIds, datafileIds, Returns.DATAFILES); - Set dfInfos = dataSelection.getDfInfo(); - for (DfInfoImpl dfInfo : dfInfos) { + Set dfInfos = dataSelection.getDfInfo(); + for (DataFileInfo dfInfo : dfInfos) { fsm.queue(dfInfo, DeferredOp.RESTORE); } } else { @@ -2106,7 +2109,7 @@ public void restore(String sessionId, String investigationIds, String datasetIds } } - private boolean restoreIfOffline(DfInfoImpl dfInfo) throws InternalException { + private boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { boolean maybeOffline = false; if (fsm.getDfMaybeOffline().contains(dfInfo)) { maybeOffline = true; @@ -2117,11 +2120,11 @@ private boolean restoreIfOffline(DfInfoImpl dfInfo) throws InternalException { return maybeOffline; } - private boolean restoreIfOffline(DsInfo dsInfo, Set emptyDatasets) throws InternalException { + private boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { boolean maybeOffline = false; if (fsm.getDsMaybeOffline().contains(dsInfo)) { maybeOffline = true; - } else if (!emptyDatasets.contains(dsInfo.getDsId()) && !mainStorage.exists(dsInfo)) { + } else if (!emptyDatasets.contains(dsInfo.getId()) && !mainStorage.exists(dsInfo)) { fsm.queue(dsInfo, DeferredOp.RESTORE); maybeOffline = true; } @@ -2148,21 +2151,21 @@ public void write(String sessionId, String investigationIds, String datasetIds, investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); // Do it - Map dsInfos = dataSelection.getDsInfo(); - Set dfInfos = dataSelection.getDfInfo(); + Map dsInfos = dataSelection.getDsInfo(); + Set dfInfos = dataSelection.getDfInfo(); try (Lock lock = lockManager.lock(dsInfos.values(), LockType.SHARED)) { 
if (twoLevel) { boolean maybeOffline = false; if (storageUnit == StorageUnit.DATASET) { - for (DsInfo dsInfo : dsInfos.values()) { - if (!dataSelection.getEmptyDatasets().contains(dsInfo.getDsId()) && + for (DataSetInfo dsInfo : dsInfos.values()) { + if (!dataSelection.getEmptyDatasets().contains(dsInfo.getId()) && !mainStorage.exists(dsInfo)) { maybeOffline = true; } } } else if (storageUnit == StorageUnit.DATAFILE) { - for (DfInfoImpl dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { if (!mainStorage.exists(dfInfo.getDfLocation())) { maybeOffline = true; } @@ -2174,11 +2177,11 @@ public void write(String sessionId, String investigationIds, String datasetIds, } if (storageUnit == StorageUnit.DATASET) { - for (DsInfo dsInfo : dsInfos.values()) { + for (DataSetInfo dsInfo : dsInfos.values()) { fsm.queue(dsInfo, DeferredOp.WRITE); } } else if (storageUnit == StorageUnit.DATAFILE) { - for (DfInfoImpl dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { fsm.queue(dfInfo, DeferredOp.WRITE); } } else { diff --git a/src/main/java/org/icatproject/ids/IdsBeanForDataFile.java b/src/main/java/org/icatproject/ids/IdsBeanForDataFile.java new file mode 100644 index 00000000..37260130 --- /dev/null +++ b/src/main/java/org/icatproject/ids/IdsBeanForDataFile.java @@ -0,0 +1,5 @@ +package org.icatproject.ids; + +public class IdsBeanForDataFile extends IdsBean { + +} diff --git a/src/main/java/org/icatproject/ids/IdsBeanForDataSet.java b/src/main/java/org/icatproject/ids/IdsBeanForDataSet.java new file mode 100644 index 00000000..c5142ab8 --- /dev/null +++ b/src/main/java/org/icatproject/ids/IdsBeanForDataSet.java @@ -0,0 +1,5 @@ +package org.icatproject.ids; + +public class IdsBeanForDataSet extends IdsBean { + +} diff --git a/src/main/java/org/icatproject/ids/LockManager.java b/src/main/java/org/icatproject/ids/LockManager.java index 86225ea1..3d57bc94 100644 --- a/src/main/java/org/icatproject/ids/LockManager.java +++ 
b/src/main/java/org/icatproject/ids/LockManager.java @@ -15,6 +15,7 @@ import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.model.DataSetInfo; @Singleton public class LockManager { @@ -130,7 +131,7 @@ private void init() { logger.debug("LockManager initialized."); } - public Lock lock(DsInfo ds, LockType type) throws AlreadyLockedException, IOException { + public Lock lock(DataSetInfo ds, LockType type) throws AlreadyLockedException, IOException { Long id = ds.getDsId(); assert id != null; synchronized (lockMap) { @@ -155,10 +156,10 @@ public Lock lock(DsInfo ds, LockType type) throws AlreadyLockedException, IOExce } } - public Lock lock(Collection datasets, LockType type) throws AlreadyLockedException, IOException { + public Lock lock(Collection datasets, LockType type) throws AlreadyLockedException, IOException { LockCollection locks = new LockCollection(); try { - for (DsInfo ds : datasets) { + for (DataSetInfo ds : datasets) { locks.add(lock(ds, type)); } } catch (AlreadyLockedException | IOException e) { diff --git a/src/main/java/org/icatproject/ids/Prepared.java b/src/main/java/org/icatproject/ids/Prepared.java index 886402c4..cf725505 100644 --- a/src/main/java/org/icatproject/ids/Prepared.java +++ b/src/main/java/org/icatproject/ids/Prepared.java @@ -4,13 +4,14 @@ import java.util.SortedMap; import java.util.SortedSet; -import org.icatproject.ids.plugin.DsInfo; +import org.icatproject.ids.v3.model.DataFileInfo; +import org.icatproject.ids.v3.model.DataSetInfo; /* This is a POJO with only package access so don't make data private */ class Prepared { boolean zip; boolean compress; - SortedSet dfInfos; - SortedMap dsInfos; + SortedSet dfInfos; + SortedMap dsInfos; Set emptyDatasets; } diff --git a/src/main/java/org/icatproject/ids/Tidier.java b/src/main/java/org/icatproject/ids/Tidier.java index 7f1bd626..2f868eb9 100644 --- 
a/src/main/java/org/icatproject/ids/Tidier.java +++ b/src/main/java/org/icatproject/ids/Tidier.java @@ -29,6 +29,8 @@ import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.model.DataFileInfo; +import org.icatproject.ids.v3.model.DataSetInfo; @Singleton @Startup @@ -71,7 +73,7 @@ public void run() { List os = reader.search(query); logger.debug(query + " returns " + os.size() + " datasets"); for (Object o : os) { - DsInfoImpl dsInfoImpl = new DsInfoImpl((Dataset) o); + DataSetInfo dsInfoImpl = new DataSetInfo((Dataset) o); logger.debug( "Requesting archive of " + dsInfoImpl + " to recover main storage"); fsm.queue(dsInfoImpl, DeferredOp.ARCHIVE); @@ -123,7 +125,7 @@ public void run() { logger.debug(query + " returns " + os.size() + " datafiles"); for (Object o : os) { Datafile df = (Datafile) o; - DfInfoImpl dfInfoImpl = new DfInfoImpl(df.getId(), df.getName(), + DataFileInfo dfInfoImpl = new DataFileInfo(df.getId(), df.getName(), IdsBean.getLocation(df.getId(), df.getLocation()), df.getCreateId(), df.getModId(), df.getDataset().getId()); diff --git a/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java b/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java index 73591bc4..8f3f722c 100644 --- a/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java +++ b/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java @@ -9,7 +9,7 @@ public class DataFileInfo extends DataInfoBase implements Comparable dsInfos = new HashMap<>(); - Set dfInfos = new HashSet<>(); + Map dsInfos = new HashMap<>(); + Set dfInfos = new HashSet<>(); Set emptyDatasets = new HashSet<>(); long dsid1 = 17L; long dsid2 = 18L; long invId = 15L; long facilityId = 45L; - dfInfos.add(new DfInfoImpl(5L, "dfName", "dfLocation", "createId", "modId", dsid1)); + dfInfos.add(new DataFileInfo(5L, "dfName", "dfLocation", "createId", "modId", dsid1)); - dfInfos.add(new 
DfInfoImpl(51L, "dfName2", null, "createId", "modId", dsid1)); + dfInfos.add(new DataFileInfo(51L, "dfName2", null, "createId", "modId", dsid1)); - dsInfos.put(dsid1, new DsInfoImpl(dsid1, "dsName", "dsLocation", invId, "invName", + dsInfos.put(dsid1, new DataSetInfo(dsid1, "dsName", "dsLocation", invId, "invName", "visitId", facilityId, "facilityName")); - dsInfos.put(dsid2, new DsInfoImpl(dsid2, "dsName2", null, invId, "invName", "visitId", + dsInfos.put(dsid2, new DataSetInfo(dsid2, "dsName2", null, invId, "invName", "visitId", facilityId, "facilityName")); emptyDatasets.add(dsid2); @@ -55,7 +56,7 @@ public void packAndUnpack() throws Exception { Prepared prepared = IdsBean.unpack(stream); assertTrue(prepared.zip); assertFalse(prepared.compress); - for (DfInfoImpl dfInfo : prepared.dfInfos) { + for (DataFileInfo dfInfo : prepared.dfInfos) { if (dfInfo.getDfId() == 5L) { assertEquals("dfName", dfInfo.getDfName()); assertEquals("dfLocation", dfInfo.getDfLocation()); @@ -69,9 +70,9 @@ public void packAndUnpack() throws Exception { assertEquals("modId", dfInfo.getModId()); assertEquals(dsid1, dfInfo.getDsId()); } - for (Entry entry : prepared.dsInfos.entrySet()) { + for (Entry entry : prepared.dsInfos.entrySet()) { Long key = entry.getKey(); - DsInfo value = entry.getValue(); + DataSetInfo value = entry.getValue(); assertEquals((Long) key, (Long) value.getDsId()); if (value.getDsId() == dsid1) { assertEquals("dsName", value.getDsName()); From 60cb9fd46bf31de96839ffbf631532facce4385d Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Mon, 12 Feb 2024 15:27:19 +0100 Subject: [PATCH 03/92] started new handler pattern first with GetData --- .../org/icatproject/ids/DataSelection.java | 7 +- .../icatproject/ids/FiniteStateMachine.java | 4 +- .../java/org/icatproject/ids/IdsBean.java | 11 +- .../icatproject/ids/IdsBeanForDataFile.java | 5 - .../icatproject/ids/IdsBeanForDataSet.java | 5 - .../java/org/icatproject/ids/IdsService.java | 55 +-- 
.../java/org/icatproject/ids/LockManager.java | 2 +- .../java/org/icatproject/ids/Prepared.java | 16 +- .../org/icatproject/ids/PropertyHandler.java | 4 +- .../java/org/icatproject/ids/StorageUnit.java | 2 +- src/main/java/org/icatproject/ids/Tidier.java | 4 +- .../ids/v3/RequestHandlerServiceBase.java | 130 +++++++ .../icatproject/ids/v3/ServiceProvider.java | 72 ++++ .../ids/v3/UnfinishedWorkServiceBase.java | 149 +++++++++ .../icatproject/ids/v3/enums/CallType.java | 5 + .../icatproject/ids/v3/enums/RequestType.java | 8 + .../ids/v3/enums/ValueContainerType.java | 8 + .../ids/v3/handlers/GetDataHandler.java | 316 ++++++++++++++++++ .../ids/v3/handlers/RequestHandlerBase.java | 207 ++++++++++++ .../org/icatproject/ids/v3/helper/SO.java | 132 ++++++++ .../v3/{model => models}/DataFileInfo.java | 2 +- .../v3/{model => models}/DataInfoBase.java | 2 +- .../ids/v3/{model => models}/DataSetInfo.java | 2 +- .../ids/v3/models/ValueContainer.java | 140 ++++++++ .../icatproject/ids/PreparePackingTest.java | 4 +- 25 files changed, 1230 insertions(+), 62 deletions(-) delete mode 100644 src/main/java/org/icatproject/ids/IdsBeanForDataFile.java delete mode 100644 src/main/java/org/icatproject/ids/IdsBeanForDataSet.java create mode 100644 src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java create mode 100644 src/main/java/org/icatproject/ids/v3/ServiceProvider.java create mode 100644 src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java create mode 100644 src/main/java/org/icatproject/ids/v3/enums/CallType.java create mode 100644 src/main/java/org/icatproject/ids/v3/enums/RequestType.java create mode 100644 src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java create mode 100644 src/main/java/org/icatproject/ids/v3/helper/SO.java rename 
src/main/java/org/icatproject/ids/v3/{model => models}/DataFileInfo.java (97%) rename src/main/java/org/icatproject/ids/v3/{model => models}/DataInfoBase.java (96%) rename src/main/java/org/icatproject/ids/v3/{model => models}/DataSetInfo.java (98%) create mode 100644 src/main/java/org/icatproject/ids/v3/models/ValueContainer.java diff --git a/src/main/java/org/icatproject/ids/DataSelection.java b/src/main/java/org/icatproject/ids/DataSelection.java index 809ce605..246f7461 100644 --- a/src/main/java/org/icatproject/ids/DataSelection.java +++ b/src/main/java/org/icatproject/ids/DataSelection.java @@ -9,8 +9,6 @@ import java.util.Map; import java.util.Set; -import javax.xml.crypto.Data; - import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonValue; @@ -29,9 +27,8 @@ import org.icatproject.ids.exceptions.InsufficientPrivilegesException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; -import org.icatproject.ids.plugin.DsInfo; -import org.icatproject.ids.v3.model.DataFileInfo; -import org.icatproject.ids.v3.model.DataSetInfo; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; /** * Class to convert 3 comma separated strings containing Investigation, diff --git a/src/main/java/org/icatproject/ids/FiniteStateMachine.java b/src/main/java/org/icatproject/ids/FiniteStateMachine.java index 2b631e38..07883e09 100644 --- a/src/main/java/org/icatproject/ids/FiniteStateMachine.java +++ b/src/main/java/org/icatproject/ids/FiniteStateMachine.java @@ -45,8 +45,8 @@ import org.icatproject.ids.thread.DsArchiver; import org.icatproject.ids.thread.DsRestorer; import org.icatproject.ids.thread.DsWriter; -import org.icatproject.ids.v3.model.DataFileInfo; -import org.icatproject.ids.v3.model.DataSetInfo; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; @Singleton @DependsOn({"LockManager"}) 
diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index c86fb556..257ace33 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -79,9 +79,10 @@ import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; -import org.icatproject.ids.v3.model.DataFileInfo; -import org.icatproject.ids.v3.model.DataInfoBase; -import org.icatproject.ids.v3.model.DataSetInfo; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.DataSetInfo; import org.icatproject.utils.IcatSecurity; import org.icatproject.utils.ShellCommand; @@ -128,10 +129,6 @@ public Void call() throws Exception { } - enum CallType { - INFO, PREPARE, READ, WRITE, MIGRATE - } - public class RestoreDfTask implements Callable { private Set dfInfos; diff --git a/src/main/java/org/icatproject/ids/IdsBeanForDataFile.java b/src/main/java/org/icatproject/ids/IdsBeanForDataFile.java deleted file mode 100644 index 37260130..00000000 --- a/src/main/java/org/icatproject/ids/IdsBeanForDataFile.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.icatproject.ids; - -public class IdsBeanForDataFile extends IdsBean { - -} diff --git a/src/main/java/org/icatproject/ids/IdsBeanForDataSet.java b/src/main/java/org/icatproject/ids/IdsBeanForDataSet.java deleted file mode 100644 index c5142ab8..00000000 --- a/src/main/java/org/icatproject/ids/IdsBeanForDataSet.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.icatproject.ids; - -public class IdsBeanForDataSet extends IdsBean { - -} diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 17564a1f..93c53cbd 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ 
b/src/main/java/org/icatproject/ids/IdsService.java @@ -4,6 +4,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; +import java.util.HashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -39,6 +40,10 @@ import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.RequestHandlerServiceBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.ValueContainer; @Path("/") @Stateless @@ -49,7 +54,24 @@ public class IdsService { @EJB private IdsBean idsBean; - private Pattern rangeRe; + @EJB + Transmitter transmitter; + + @EJB + private FiniteStateMachine fsm; + + @EJB + private LockManager lockManager; + + @EJB + private IcatReader reader; + + private RequestHandlerServiceBase requestHandler; + + + public IdsService() { + this.requestHandler = new RequestHandlerServiceBase(); + } /** * Archive data specified by the investigationIds, datasetIds and @@ -170,26 +192,21 @@ public Response getData(@Context HttpServletRequest request, @QueryParam("prepar @QueryParam("compress") boolean compress, @QueryParam("zip") boolean zip, @QueryParam("outname") String outname, @HeaderParam("Range") String range) throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { - Response response = null; - long offset = 0; - if (range != null) { - Matcher m = rangeRe.matcher(range); - if (!m.matches()) { - throw new BadRequestException("The range must match " + rangeRe.pattern()); - } - offset = Long.parseLong(m.group(1)); - logger.debug("Range " + range + " -> offset " + offset); - } + var parameters = new HashMap(); + parameters.put( "request", new ValueContainer(request) ); + parameters.put( "preparedId", new 
ValueContainer(preparedId) ); + parameters.put( "sessionId", new ValueContainer(sessionId) ); + parameters.put( "investigationIds", new ValueContainer(investigationIds) ); + parameters.put( "datasetIds", new ValueContainer(datasetIds) ); + parameters.put( "datafileIds", new ValueContainer(datafileIds) ); + parameters.put( "compress", new ValueContainer(compress) ); + parameters.put( "zip", new ValueContainer(zip) ); + parameters.put( "outname", new ValueContainer(outname) ); + parameters.put( "Range", new ValueContainer(range) ); - if (preparedId != null) { - response = idsBean.getData(preparedId, outname, offset, request.getRemoteAddr()); - } else { - response = idsBean.getData(sessionId, investigationIds, datasetIds, datafileIds, compress, zip, outname, - offset, request.getRemoteAddr()); - } - return response; + return this.requestHandler.handle(RequestType.GETDATA, parameters).getResponse(); } /** @@ -334,7 +351,7 @@ public String getStatus(@Context HttpServletRequest request, @QueryParam("prepar @PostConstruct private void init() { logger.info("creating IdsService"); - rangeRe = Pattern.compile("bytes=(\\d+)-"); + this.requestHandler.init(this.transmitter, this.lockManager, this.fsm, this.reader); logger.info("created IdsService"); } diff --git a/src/main/java/org/icatproject/ids/LockManager.java b/src/main/java/org/icatproject/ids/LockManager.java index 3d57bc94..972e6e98 100644 --- a/src/main/java/org/icatproject/ids/LockManager.java +++ b/src/main/java/org/icatproject/ids/LockManager.java @@ -15,7 +15,7 @@ import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; -import org.icatproject.ids.v3.model.DataSetInfo; +import org.icatproject.ids.v3.models.DataSetInfo; @Singleton public class LockManager { diff --git a/src/main/java/org/icatproject/ids/Prepared.java b/src/main/java/org/icatproject/ids/Prepared.java index cf725505..09587230 100644 --- 
a/src/main/java/org/icatproject/ids/Prepared.java +++ b/src/main/java/org/icatproject/ids/Prepared.java @@ -4,14 +4,14 @@ import java.util.SortedMap; import java.util.SortedSet; -import org.icatproject.ids.v3.model.DataFileInfo; -import org.icatproject.ids.v3.model.DataSetInfo; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; /* This is a POJO with only package access so don't make data private */ -class Prepared { - boolean zip; - boolean compress; - SortedSet dfInfos; - SortedMap dsInfos; - Set emptyDatasets; +public class Prepared { + public boolean zip; + public boolean compress; + public SortedSet dfInfos; + public SortedMap dsInfos; + public Set emptyDatasets; } diff --git a/src/main/java/org/icatproject/ids/PropertyHandler.java b/src/main/java/org/icatproject/ids/PropertyHandler.java index a82c846d..96e819df 100644 --- a/src/main/java/org/icatproject/ids/PropertyHandler.java +++ b/src/main/java/org/icatproject/ids/PropertyHandler.java @@ -22,10 +22,10 @@ import org.icatproject.ICAT; import org.icatproject.IcatException_Exception; -import org.icatproject.ids.IdsBean.CallType; import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; +import org.icatproject.ids.v3.enums.CallType; import org.icatproject.utils.CheckedProperties; import org.icatproject.utils.CheckedProperties.CheckedPropertyException; @@ -345,7 +345,7 @@ public long getStopArchivingLevel() { return stopArchivingLevel; } - StorageUnit getStorageUnit() { + public StorageUnit getStorageUnit() { return storageUnit; } diff --git a/src/main/java/org/icatproject/ids/StorageUnit.java b/src/main/java/org/icatproject/ids/StorageUnit.java index 9e4712fb..215b0445 100644 --- a/src/main/java/org/icatproject/ids/StorageUnit.java +++ b/src/main/java/org/icatproject/ids/StorageUnit.java @@ -1,5 +1,5 @@ package org.icatproject.ids; -enum StorageUnit { 
+public enum StorageUnit { DATASET, DATAFILE } diff --git a/src/main/java/org/icatproject/ids/Tidier.java b/src/main/java/org/icatproject/ids/Tidier.java index 2f868eb9..8e898af0 100644 --- a/src/main/java/org/icatproject/ids/Tidier.java +++ b/src/main/java/org/icatproject/ids/Tidier.java @@ -29,8 +29,8 @@ import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; -import org.icatproject.ids.v3.model.DataFileInfo; -import org.icatproject.ids.v3.model.DataSetInfo; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; @Singleton @Startup diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java new file mode 100644 index 00000000..06e072ce --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java @@ -0,0 +1,130 @@ +package org.icatproject.ids.v3; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.concurrent.Executors; + +import javax.xml.datatype.DatatypeFactory; + +import org.icatproject.ids.FiniteStateMachine; +import org.icatproject.ids.IcatReader; +import org.icatproject.ids.LockManager; +import org.icatproject.ids.PropertyHandler; +import org.icatproject.ids.Transmitter; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.plugin.ArchiveStorageInterface; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.handlers.GetDataHandler; +import org.icatproject.ids.v3.handlers.RequestHandlerBase; +import org.icatproject.ids.v3.models.ValueContainer; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RequestHandlerServiceBase { + + private HashMap handlers; + private PropertyHandler propertyHandler; + protected final static Logger logger = LoggerFactory.getLogger(RequestHandlerBase.class); + private static Boolean inited = false; + protected Path preparedDir; + private static String key; + private ArchiveStorageInterface archiveStorage; + private boolean twoLevel; + private Path datasetDir; + private Path markerDir; + private UnfinishedWorkServiceBase unfinishedWorkService; + + + public RequestHandlerServiceBase() { + + this.propertyHandler = PropertyHandler.getInstance(); + this.unfinishedWorkService = new UnfinishedWorkServiceBase(); + + this.handlers = new HashMap(); + this.registerHandler(RequestType.GETDATA, new GetDataHandler()); + } + + private void registerHandler(RequestType requestType, RequestHandlerBase requestHandler) { + + //use only the handlers that supports the configured StorageUnit + if( requestHandler.supportsStorageUnit(this.propertyHandler.getStorageUnit()) ) + this.handlers.put(requestType, requestHandler); + } + + public ValueContainer handle(RequestType requestType, HashMap parameters) throws InternalException, BadRequestException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException { + + if(this.handlers.containsKey(requestType)) + return this.handlers.get(requestType).handle(parameters); + else + throw new InternalException("No handler found for RequestType " + requestType + " and StorageUnit " + this.propertyHandler.getStorageUnit() + " in RequestHandlerService. 
Do you forgot to register?"); + } + + public void init(Transmitter transmitter, LockManager lockManager , FiniteStateMachine fsm, IcatReader reader) { + try { + synchronized (inited) { + logger.info("creating RequestHandlerService"); + ServiceProvider.createInstance(transmitter, fsm, lockManager, reader); + propertyHandler = ServiceProvider.getInstance().getPropertyHandler(); + // zipMapper = propertyHandler.getZipMapper(); + // mainStorage = propertyHandler.getMainStorage(); + archiveStorage = propertyHandler.getArchiveStorage(); + twoLevel = archiveStorage != null; + // datatypeFactory = DatatypeFactory.newInstance(); + preparedDir = propertyHandler.getCacheDir().resolve("prepared"); + Files.createDirectories(preparedDir); + + // rootUserNames = propertyHandler.getRootUserNames(); + // readOnly = propertyHandler.getReadOnly(); + // enableWrite = propertyHandler.getEnableWrite(); + + // icat = propertyHandler.getIcatService(); + + if (!inited) { + key = propertyHandler.getKey(); + logger.info("Key is " + (key == null ? 
"not set" : "set")); + } + + if (twoLevel) { + // storageUnit = propertyHandler.getStorageUnit(); + datasetDir = propertyHandler.getCacheDir().resolve("dataset"); + markerDir = propertyHandler.getCacheDir().resolve("marker"); + if (!inited) { + Files.createDirectories(datasetDir); + Files.createDirectories(markerDir); + this.unfinishedWorkService.restartUnfinishedWork(markerDir, key); + } + } + + if (!inited) { + UnfinishedWorkServiceBase.cleanPreparedDir(preparedDir); + if (twoLevel) { + UnfinishedWorkServiceBase.cleanDatasetCache(datasetDir); + } + } + + // maxIdsInQuery = propertyHandler.getMaxIdsInQuery(); + + // threadPool = Executors.newCachedThreadPool(); + + // logSet = propertyHandler.getLogSet(); + + for(RequestHandlerBase handler : this.handlers.values()) { + handler.init(); + } + + inited = true; + + logger.info("created RequestHandlerService"); + } + } catch (Throwable e) { + logger.error("Won't start ", e); + throw new RuntimeException("RequestHandlerService reports " + e.getClass() + " " + e.getMessage()); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java new file mode 100644 index 00000000..306e9db5 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java @@ -0,0 +1,72 @@ +package org.icatproject.ids.v3; + +import org.icatproject.ICAT; +import org.icatproject.ids.FiniteStateMachine; +import org.icatproject.ids.IcatReader; +import org.icatproject.ids.LockManager; +import org.icatproject.ids.PropertyHandler; +import org.icatproject.ids.Transmitter; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.plugin.MainStorageInterface; + +public class ServiceProvider { + + private static ServiceProvider instance = null; + + private Transmitter transmitter; + private FiniteStateMachine fsm; + private LockManager lockManager; + private IcatReader icatReader; + private ICAT icat; + + 
private ServiceProvider(Transmitter transmitter, FiniteStateMachine fsm, LockManager lockManager, IcatReader reader) { + this.transmitter = transmitter; + this.fsm = fsm; + this.lockManager = lockManager; + this.icatReader = reader; + } + + public static void createInstance(Transmitter transmitter, FiniteStateMachine fsm, LockManager lockManager, IcatReader reader) { + + if(instance != null) return; + + instance = new ServiceProvider(transmitter, fsm, lockManager, reader); + } + + public static ServiceProvider getInstance() throws InternalException { + if(instance == null) { + throw new InternalException("ServiceProvider is not yet instantiated, please call createInstance at first."); + } + return instance; + } + + public Transmitter getTransmitter() { + return transmitter; + } + + public FiniteStateMachine getFsm() { + return fsm; + } + + public LockManager getLockManager() { + return lockManager; + } + + public IcatReader getIcatReader() { + return icatReader; + } + + public PropertyHandler getPropertyHandler() { + return PropertyHandler.getInstance(); + } + + public MainStorageInterface getMainStorage() { + return this.getPropertyHandler().getMainStorage(); + } + + public ICAT getIcat() { + return this.getPropertyHandler().getIcatService(); + } + + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java b/src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java new file mode 100644 index 00000000..6d863c73 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java @@ -0,0 +1,149 @@ +package org.icatproject.ids.v3; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.NoSuchAlgorithmException; + +import org.icatproject.Datafile; +import org.icatproject.Dataset; +import org.icatproject.IcatExceptionType; +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.DeferredOp; 
+import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.utils.IcatSecurity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class UnfinishedWorkServiceBase { + + + protected final static Logger logger = LoggerFactory.getLogger(UnfinishedWorkServiceBase.class); + + public UnfinishedWorkServiceBase() { + + } + + public void restartUnfinishedWork(Path markerDir, String key) throws InternalException { + + try { + var serviceProvider = ServiceProvider.getInstance(); + StorageUnit storageUnit = serviceProvider.getPropertyHandler().getStorageUnit(); + for (File file : markerDir.toFile().listFiles()) { + if (storageUnit == StorageUnit.DATASET) { + long dsid = Long.parseLong(file.toPath().getFileName().toString()); + Dataset ds = null; + try { + ds = (Dataset) serviceProvider.getIcatReader().get("Dataset ds INCLUDE ds.investigation.facility", dsid); + DataSetInfo dsInfo = new DataSetInfo(ds); + serviceProvider.getFsm().queue(dsInfo, DeferredOp.WRITE); + logger.info("Queued dataset with id " + dsid + " " + dsInfo + + " to be written as it was not written out previously by IDS"); + } catch (IcatException_Exception e) { + if (e.getFaultInfo().getType() == IcatExceptionType.NO_SUCH_OBJECT_FOUND) { + logger.warn("Dataset with id " + dsid + + " was not written out by IDS and now no longer known to ICAT"); + Files.delete(file.toPath()); + } else { + throw e; + } + } + } else if (storageUnit == StorageUnit.DATAFILE) { + long dfid = Long.parseLong(file.toPath().getFileName().toString()); + Datafile df = null; + try { + df = (Datafile) serviceProvider.getIcatReader().get("Datafile ds INCLUDE ds.dataset", dfid); + String location = getLocation(df.getId(), df.getLocation(), key); + DataFileInfo dfInfo = new DataFileInfo(dfid, 
df.getName(), location, df.getCreateId(), + df.getModId(), df.getDataset().getId()); + serviceProvider.getFsm().queue(dfInfo, DeferredOp.WRITE); + logger.info("Queued datafile with id " + dfid + " " + dfInfo + + " to be written as it was not written out previously by IDS"); + } catch (IcatException_Exception e) { + if (e.getFaultInfo().getType() == IcatExceptionType.NO_SUCH_OBJECT_FOUND) { + logger.warn("Datafile with id " + dfid + + " was not written out by IDS and now no longer known to ICAT"); + Files.delete(file.toPath()); + } else { + throw e; + } + } + } + } + } catch (Exception e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + } + + public static void cleanPreparedDir(Path preparedDir) { + for (File file : preparedDir.toFile().listFiles()) { + Path path = file.toPath(); + String pf = path.getFileName().toString(); + if (pf.startsWith("tmp.") || pf.endsWith(".tmp")) { + try { + long thisSize = 0; + if (Files.isDirectory(path)) { + for (File notZipFile : file.listFiles()) { + thisSize += Files.size(notZipFile.toPath()); + Files.delete(notZipFile.toPath()); + } + } + thisSize += Files.size(path); + Files.delete(path); + logger.debug("Deleted " + path + " to reclaim " + thisSize + " bytes"); + } catch (IOException e) { + logger.debug("Failed to delete " + path + e.getMessage()); + } + } + } + } + + static void cleanDatasetCache(Path datasetDir) { + for (File dsFile : datasetDir.toFile().listFiles()) { + Path path = dsFile.toPath(); + try { + long thisSize = Files.size(path); + Files.delete(path); + logger.debug("Deleted " + path + " to reclaim " + thisSize + " bytes"); + } catch (IOException e) { + logger.debug("Failed to delete " + path + " " + e.getClass() + " " + e.getMessage()); + } + } + } + + + public static String getLocation(long dfid, String location, String key) + throws InsufficientPrivilegesException, InternalException { + if (location == null) { + throw new InternalException("location is null"); + } + if (key == null) { 
+ return location; + } else { + return getLocationFromDigest(dfid, location, key); + } + } + + static String getLocationFromDigest(long id, String locationWithHash, String key) + throws InternalException, InsufficientPrivilegesException { + int i = locationWithHash.lastIndexOf(' '); + try { + String location = locationWithHash.substring(0, i); + String hash = locationWithHash.substring(i + 1); + if (!hash.equals(IcatSecurity.digest(id, location, key))) { + throw new InsufficientPrivilegesException( + "Location \"" + locationWithHash + "\" does not contain a valid hash."); + } + return location; + } catch (IndexOutOfBoundsException e) { + throw new InsufficientPrivilegesException("Location \"" + locationWithHash + "\" does not contain hash."); + } catch (NoSuchAlgorithmException e) { + throw new InternalException(e.getMessage()); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/enums/CallType.java b/src/main/java/org/icatproject/ids/v3/enums/CallType.java new file mode 100644 index 00000000..c33bec1c --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/enums/CallType.java @@ -0,0 +1,5 @@ +package org.icatproject.ids.v3.enums; + +public enum CallType { + INFO, PREPARE, READ, WRITE, MIGRATE +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java new file mode 100644 index 00000000..83baffb5 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -0,0 +1,8 @@ +package org.icatproject.ids.v3.enums; + +/** + * This enum contains all defined types of requests to this server + */ +public enum RequestType { + GETDATA +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java new file mode 100644 index 00000000..5aa1a252 --- /dev/null +++ 
b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java @@ -0,0 +1,8 @@ +package org.icatproject.ids.v3.enums; + +/** + * This enum provides all possible values of a ValueContainer + */ +public enum ValueContainerType { + INVALID, INT, BOOL, STRING, REQUEST, RESPONSE +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java new file mode 100644 index 00000000..e45576ef --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -0,0 +1,316 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.DataSelection; +import org.icatproject.ids.Prepared; +import org.icatproject.ids.DataSelection.Returns; +import org.icatproject.ids.LockManager; +import org.icatproject.ids.LockManager.Lock; +import org.icatproject.ids.LockManager.LockType; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.IdsException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.plugin.AlreadyLockedException; +import org.icatproject.ids.v3.ServiceProvider; +import 
org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.helper.SO; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; +import jakarta.ws.rs.core.Response; + +public class GetDataHandler extends RequestHandlerBase { + + private Pattern rangeRe; + private static AtomicLong atomicLong = new AtomicLong(); + + public GetDataHandler() { + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET} ); + } + + public void init() { + this.rangeRe = Pattern.compile("bytes=(\\d+)-"); + } + + @Override + public ValueContainer handle(HashMap parameters) throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { + Response response = null; + + long offset = 0; + var range = parameters.get("range"); + if ( range != null) { + var rangeValue = range.getString(); + + Matcher m = rangeRe.matcher(rangeValue); + if (!m.matches()) { + throw new BadRequestException("The range must match " + rangeRe.pattern()); + } + offset = Long.parseLong(m.group(1)); + logger.debug("Range " + rangeValue + " -> offset " + offset); + } + + var preparedId = parameters.get("preparedId"); + if (preparedId != null) { + response = this.getData(preparedId.getString(), + parameters.getOrDefault("outname", ValueContainer.getInvalid()).getString(), + offset, + parameters.getOrDefault("request", ValueContainer.getInvalid()).getRequest().getRemoteAddr()); + } else { + response = this.getData(parameters.getOrDefault("sessionId", ValueContainer.getInvalid()).getString(), + parameters.getOrDefault("investigationIds", ValueContainer.getInvalid()).getString(), + parameters.getOrDefault("datasetIds", ValueContainer.getInvalid()).getString(), + parameters.getOrDefault("datafileIds", ValueContainer.getInvalid()).getString(), + parameters.getOrDefault("compress", 
ValueContainer.getInvalid()).getBool(), + parameters.getOrDefault("zip", ValueContainer.getInvalid()).getBool(), + parameters.getOrDefault("outname", ValueContainer.getInvalid()).getString(), + offset, + parameters.getOrDefault("request", ValueContainer.getInvalid()).getRequest().getRemoteAddr()); + } + + return new ValueContainer(response); + } + + + + private Response getData(String preparedId, String outname, final long offset, String ip) throws BadRequestException, + NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { + + long time = System.currentTimeMillis(); + + // Log and validate + logger.info("New webservice request: getData preparedId = '" + preparedId + "' outname = '" + outname + + "' offset = " + offset); + + validateUUID("preparedId", preparedId); + + // Do it + Prepared prepared; + try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { + prepared = unpack(stream); + } catch (NoSuchFileException e) { + throw new NotFoundException("The preparedId " + preparedId + " is not known"); + } catch (IOException e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + final boolean zip = prepared.zip; + final boolean compress = prepared.compress; + final Set dfInfos = prepared.dfInfos; + final Map dsInfos = prepared.dsInfos; + Set emptyDatasets = prepared.emptyDatasets; + + Lock lock = null; + try { + var serviceProvider = ServiceProvider.getInstance(); + lock = serviceProvider.getLockManager().lock(dsInfos.values(), LockType.SHARED); + + if (twoLevel) { + checkOnline(dsInfos.values(), emptyDatasets, dfInfos); + } + checkDatafilesPresent(dfInfos); + + /* Construct the name to include in the headers */ + String name; + if (outname == null) { + if (zip) { + name = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date()) + ".zip"; + } else { + name = dfInfos.iterator().next().getDfName(); + } + } else { + if (zip) { + String ext = 
outname.substring(outname.lastIndexOf(".") + 1, outname.length()); + if ("zip".equals(ext)) { + name = outname; + } else { + name = outname + ".zip"; + } + } else { + name = outname; + } + } + + Long transferId = null; + if (serviceProvider.getPropertyHandler().getLogSet().contains(CallType.READ)) { + transferId = atomicLong.getAndIncrement(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("transferId", transferId); + gen.write("preparedId", preparedId); + gen.writeEnd(); + } + serviceProvider.getTransmitter().processMessage("getDataStart", ip, baos.toString(), time); + } + + return Response.status(offset == 0 ? HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_PARTIAL) + .entity(new SO(dsInfos, dfInfos, offset, zip, compress, lock, transferId, ip, time, serviceProvider)) + .header("Content-Disposition", "attachment; filename=\"" + name + "\"").header("Accept-Ranges", "bytes") + .build(); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock, getData failed"); + throw new DataNotOnlineException("Data is busy"); + } catch (IOException e) { + if (lock != null) { + lock.release(); + } + logger.error("I/O error " + e.getMessage()); + throw new InternalException(e.getClass() + " " + e.getMessage()); + } catch (IdsException e) { + lock.release(); + throw e; + } + } + + + private Response getData(String sessionId, String investigationIds, String datasetIds, String datafileIds, + final boolean compress, boolean zip, String outname, final long offset, String ip) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException { + + long start = System.currentTimeMillis(); + + // Log and validate + logger.info(String.format("New webservice request: getData investigationIds=%s, datasetIds=%s, datafileIds=%s", + investigationIds, datasetIds, datafileIds)); + + validateUUID("sessionId", 
sessionId); + + var serviceProvider = ServiceProvider.getInstance(); + + // TODO: change constructor of DataSelection and receive PropertyHandler and IcatReader from ServiceProvider within + final DataSelection dataSelection = new DataSelection(serviceProvider.getPropertyHandler(), serviceProvider.getIcatReader(), sessionId, + investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); + + // Do it + Map dsInfos = dataSelection.getDsInfo(); + Set dfInfos = dataSelection.getDfInfo(); + + Lock lock = null; + try { + lock = serviceProvider.getLockManager().lock(dsInfos.values(), LockType.SHARED); + + if (twoLevel) { + checkOnline(dsInfos.values(), dataSelection.getEmptyDatasets(), dfInfos); + } + checkDatafilesPresent(dfInfos); + + final boolean finalZip = zip ? true : dataSelection.mustZip(); + + /* Construct the name to include in the headers */ + String name; + if (outname == null) { + if (finalZip) { + name = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date()) + ".zip"; + } else { + name = dataSelection.getDfInfo().iterator().next().getDfName(); + } + } else { + if (finalZip) { + String ext = outname.substring(outname.lastIndexOf(".") + 1, outname.length()); + if ("zip".equals(ext)) { + name = outname; + } else { + name = outname + ".zip"; + } + } else { + name = outname; + } + } + + Long transferId = null; + if (serviceProvider.getPropertyHandler().getLogSet().contains(CallType.READ)) { + try { + transferId = atomicLong.getAndIncrement(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("transferId", transferId); + gen.write("userName", serviceProvider.getIcat().getUserName(sessionId)); + addIds(gen, investigationIds, datasetIds, datafileIds); + gen.writeEnd(); + } + serviceProvider.getTransmitter().processMessage("getDataStart", ip, baos.toString(), start); + } catch (IcatException_Exception e) { + logger.error("Failed to prepare jms 
message " + e.getClass() + " " + e.getMessage()); + } + } + + return Response.status(offset == 0 ? HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_PARTIAL) + .entity(new SO(dataSelection.getDsInfo(), dataSelection.getDfInfo(), offset, finalZip, compress, lock, + transferId, ip, start, serviceProvider)) + .header("Content-Disposition", "attachment; filename=\"" + name + "\"").header("Accept-Ranges", "bytes") + .build(); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock, getData failed"); + throw new DataNotOnlineException("Data is busy"); + } catch (IOException e) { + if (lock != null) { + lock.release(); + } + logger.error("I/O error " + e.getMessage()); + throw new InternalException(e.getClass() + " " + e.getMessage()); + } catch (IdsException e) { + lock.release(); + throw e; + } + } + + private void checkDatafilesPresent(Set dfInfos) + throws NotFoundException, InternalException { + + var serviceProvider = ServiceProvider.getInstance(); + /* Check that datafiles have not been deleted before locking */ + int n = 0; + StringBuffer sb = new StringBuffer("SELECT COUNT(df) from Datafile df WHERE (df.id in ("); + for (DataFileInfo dfInfo : dfInfos) { + if (n != 0) { + sb.append(','); + } + sb.append(dfInfo.getId()); + if (++n == serviceProvider.getPropertyHandler().getMaxIdsInQuery()) { + try { + if (((Long) serviceProvider.getIcatReader().search(sb.append("))").toString()).get(0)).intValue() != n) { + throw new NotFoundException("One of the data files requested has been deleted"); + } + n = 0; + sb = new StringBuffer("SELECT COUNT(df) from Datafile df WHERE (df.id in ("); + } catch (IcatException_Exception e) { + throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); + } + } + } + if (n != 0) { + try { + if (((Long) serviceProvider.getIcatReader().search(sb.append("))").toString()).get(0)).intValue() != n) { + throw new NotFoundException("One of the datafiles requested has been deleted"); + } + } catch 
(IcatException_Exception e) { + throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); + } + } + + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java new file mode 100644 index 00000000..497fd990 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java @@ -0,0 +1,207 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.SortedMap; +import java.util.SortedSet; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.regex.Pattern; + +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.ValueContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.json.Json; +import jakarta.json.JsonNumber; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.json.JsonValue; +import jakarta.json.stream.JsonGenerator; + +import org.icatproject.ids.DataSelection; +import org.icatproject.ids.DeferredOp; +import org.icatproject.ids.Prepared; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; + +public abstract class RequestHandlerBase { + + private List supportedStorageUnits; + protected final static Logger logger = 
LoggerFactory.getLogger(RequestHandlerBase.class); + protected Path preparedDir; + protected boolean twoLevel; + protected StorageUnit storageUnit; + + /** + * matches standard UUID format of 8-4-4-4-12 hexadecimal digits + */ + public static final Pattern uuidRegExp = Pattern + .compile("^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"); + + protected RequestHandlerBase(StorageUnit[] supportedStorageUnitsArray ) { + this.supportedStorageUnits = Arrays.asList(supportedStorageUnitsArray); + } + + protected RequestHandlerBase(StorageUnit supportedStorageUnit) { + this(new StorageUnit[]{supportedStorageUnit}); + } + + public boolean supportsStorageUnit(StorageUnit neededStorageUnit) { + return this.supportedStorageUnits.contains(neededStorageUnit); + } + + public void init() throws InternalException { + + var serviceProvider = ServiceProvider.getInstance(); + var propertyHandler = serviceProvider.getPropertyHandler(); + this.preparedDir = propertyHandler.getCacheDir().resolve("prepared"); + + this.storageUnit = propertyHandler.getStorageUnit(); + + var archiveStorage = propertyHandler.getArchiveStorage(); + this.twoLevel = archiveStorage != null; + } + + public abstract ValueContainer handle(HashMap parameters) throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException; + + + protected static void validateUUID(String thing, String id) throws BadRequestException { + if (id == null || !uuidRegExp.matcher(id).matches()) + throw new BadRequestException("The " + thing + " parameter '" + id + "' is not a valid UUID"); + } + + protected static Prepared unpack(InputStream stream) throws InternalException { + Prepared prepared = new Prepared(); + JsonObject pd; + try (JsonReader jsonReader = Json.createReader(stream)) { + pd = jsonReader.readObject(); + } + prepared.zip = pd.getBoolean("zip"); + prepared.compress = pd.getBoolean("compress"); + SortedMap dsInfos = new TreeMap<>(); + SortedSet dfInfos 
= new TreeSet<>(); + Set emptyDatasets = new HashSet<>(); + + for (JsonValue itemV : pd.getJsonArray("dfInfo")) { + JsonObject item = (JsonObject) itemV; + String dfLocation = item.isNull("dfLocation") ? null : item.getString("dfLocation"); + dfInfos.add(new DataFileInfo(item.getJsonNumber("dfId").longValueExact(), item.getString("dfName"), + dfLocation, item.getString("createId"), item.getString("modId"), + item.getJsonNumber("dsId").longValueExact())); + + } + prepared.dfInfos = dfInfos; + + for (JsonValue itemV : pd.getJsonArray("dsInfo")) { + JsonObject item = (JsonObject) itemV; + long dsId = item.getJsonNumber("dsId").longValueExact(); + String dsLocation = item.isNull("dsLocation") ? null : item.getString("dsLocation"); + dsInfos.put(dsId, new DataSetInfo(dsId, item.getString("dsName"), dsLocation, + item.getJsonNumber("invId").longValueExact(), item.getString("invName"), item.getString("visitId"), + item.getJsonNumber("facilityId").longValueExact(), item.getString("facilityName"))); + } + prepared.dsInfos = dsInfos; + + for (JsonValue itemV : pd.getJsonArray("emptyDs")) { + emptyDatasets.add(((JsonNumber) itemV).longValueExact()); + } + prepared.emptyDatasets = emptyDatasets; + + return prepared; + } + + //maybo should be moved somewhere else? To DataSelection? 
+ protected void checkOnline(Collection dsInfos, Set emptyDatasets, + Set dfInfos) + throws InternalException, DataNotOnlineException { + if (storageUnit == StorageUnit.DATASET) { + boolean maybeOffline = false; + for (DataSetInfo dsInfo : dsInfos) { + if (restoreIfOffline(dsInfo, emptyDatasets)) { + maybeOffline = true; + } + } + if (maybeOffline) { + throw new DataNotOnlineException( + "Before putting, getting or deleting a datafile, its dataset has to be restored, restoration requested automatically"); + } + } else if (storageUnit == StorageUnit.DATAFILE) { + boolean maybeOffline = false; + for (DataFileInfo dfInfo : dfInfos) { + if (restoreIfOffline(dfInfo)) { + maybeOffline = true; + } + } + if (maybeOffline) { + throw new DataNotOnlineException( + "Before getting a datafile, it must be restored, restoration requested automatically"); + } + } + } + + private boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDfMaybeOffline().contains(dfInfo)) { + maybeOffline = true; + } else if (!serviceProvider.getMainStorage().exists(dfInfo.getDfLocation())) { + serviceProvider.getFsm().queue(dfInfo, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + + private boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDsMaybeOffline().contains(dsInfo)) { + maybeOffline = true; + } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists(dsInfo)) { + serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + + protected void addIds(JsonGenerator gen, String investigationIds, String datasetIds, String datafileIds) + throws BadRequestException { + if (investigationIds 
!= null) { + gen.writeStartArray("investigationIds"); + for (long invid : DataSelection.getValidIds("investigationIds", investigationIds)) { + gen.write(invid); + } + gen.writeEnd(); + } + if (datasetIds != null) { + gen.writeStartArray("datasetIds"); + for (long invid : DataSelection.getValidIds("datasetIds", datasetIds)) { + gen.write(invid); + } + gen.writeEnd(); + } + if (datafileIds != null) { + gen.writeStartArray("datafileIds"); + for (long invid : DataSelection.getValidIds("datafileIds", datafileIds)) { + gen.write(invid); + } + gen.writeEnd(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/helper/SO.java b/src/main/java/org/icatproject/ids/v3/helper/SO.java new file mode 100644 index 00000000..2badb253 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/helper/SO.java @@ -0,0 +1,132 @@ +package org.icatproject.ids.v3.helper; + +import java.io.BufferedOutputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Map; +import java.util.Set; +import java.util.zip.ZipEntry; +import java.util.zip.ZipException; +import java.util.zip.ZipOutputStream; + +import org.icatproject.ids.RangeOutputStream; +import org.icatproject.ids.LockManager.Lock; +import org.icatproject.ids.plugin.DsInfo; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; +import jakarta.ws.rs.core.StreamingOutput; + +public class SO implements StreamingOutput { + + private long offset; + private boolean zip; + private Map dsInfos; + private Lock lock; + private boolean compress; + private Set dfInfos; + private String ip; + private long start; + private Long transferId; + private ServiceProvider serviceProvider; + + private 
static final int BUFSIZ = 2048; + private final static Logger logger = LoggerFactory.getLogger(SO.class); + + public SO(Map dsInfos, Set dfInfos, long offset, boolean zip, boolean compress, + Lock lock, Long transferId, String ip, long start, ServiceProvider serviceProvider) { + this.offset = offset; + this.zip = zip; + this.dsInfos = dsInfos; + this.dfInfos = dfInfos; + this.lock = lock; + this.compress = compress; + this.transferId = transferId; + this.ip = ip; + this.start = start; + this.serviceProvider = serviceProvider; + } + + @Override + public void write(OutputStream output) throws IOException { + + Object transfer = "??"; + try { + if (offset != 0) { // Wrap the stream if needed + output = new RangeOutputStream(output, offset, null); + } + byte[] bytes = new byte[BUFSIZ]; + if (zip) { + ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(output)); + if (!compress) { + zos.setLevel(0); // Otherwise use default compression + } + + for (DataFileInfo dfInfo : dfInfos) { + logger.debug("Adding " + dfInfo + " to zip"); + transfer = dfInfo; + DsInfo dsInfo = dsInfos.get(dfInfo.getDsId()); + String entryName = this.serviceProvider.getPropertyHandler().getZipMapper().getFullEntryName(dsInfo, dfInfo); + InputStream stream = null; + try { + zos.putNextEntry(new ZipEntry(entryName)); + stream = this.serviceProvider.getMainStorage().get(dfInfo.getDfLocation(), dfInfo.getCreateId(), dfInfo.getModId()); + int length; + while ((length = stream.read(bytes)) >= 0) { + zos.write(bytes, 0, length); + } + } catch (ZipException e) { + logger.debug("Skipped duplicate"); + } + zos.closeEntry(); + if (stream != null) { + stream.close(); + } + } + zos.close(); + } else { + DataFileInfo dfInfo = dfInfos.iterator().next(); + transfer = dfInfo; + InputStream stream = this.serviceProvider.getMainStorage().get(dfInfo.getDfLocation(), dfInfo.getCreateId(), + dfInfo.getModId()); + int length; + while ((length = stream.read(bytes)) >= 0) { + output.write(bytes, 0, length); 
+ } + output.close(); + stream.close(); + } + + if (transferId != null) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("transferId", transferId); + gen.writeEnd(); + } + this.serviceProvider.getTransmitter().processMessage("getData", ip, baos.toString(), start); + } + + } catch (IOException e) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("transferId", transferId); + gen.write("exceptionClass", e.getClass().toString()); + gen.write("exceptionMessage", e.getMessage()); + gen.writeEnd(); + } + this.serviceProvider.getTransmitter().processMessage("getData", ip, baos.toString(), start); + logger.error("Failed to stream " + transfer + " due to " + e.getMessage()); + throw e; + } finally { + lock.release(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java b/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java similarity index 97% rename from src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java rename to src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java index 8f3f722c..924d220e 100644 --- a/src/main/java/org/icatproject/ids/v3/model/DataFileInfo.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java @@ -1,4 +1,4 @@ -package org.icatproject.ids.v3.model; +package org.icatproject.ids.v3.models; import org.icatproject.ids.plugin.DfInfo; /** diff --git a/src/main/java/org/icatproject/ids/v3/model/DataInfoBase.java b/src/main/java/org/icatproject/ids/v3/models/DataInfoBase.java similarity index 96% rename from src/main/java/org/icatproject/ids/v3/model/DataInfoBase.java rename to src/main/java/org/icatproject/ids/v3/models/DataInfoBase.java index f75dfbd5..7037061a 100644 --- a/src/main/java/org/icatproject/ids/v3/model/DataInfoBase.java +++ 
b/src/main/java/org/icatproject/ids/v3/models/DataInfoBase.java @@ -1,4 +1,4 @@ -package org.icatproject.ids.v3.model; +package org.icatproject.ids.v3.models; /** diff --git a/src/main/java/org/icatproject/ids/v3/model/DataSetInfo.java b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java similarity index 98% rename from src/main/java/org/icatproject/ids/v3/model/DataSetInfo.java rename to src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java index cb93d8cc..e89ddd57 100644 --- a/src/main/java/org/icatproject/ids/v3/model/DataSetInfo.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java @@ -1,4 +1,4 @@ -package org.icatproject.ids.v3.model; +package org.icatproject.ids.v3.models; import org.icatproject.Dataset; import org.icatproject.Facility; diff --git a/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java b/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java new file mode 100644 index 00000000..fa0254e1 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java @@ -0,0 +1,140 @@ +package org.icatproject.ids.v3.models; + +import org.icatproject.ids.v3.enums.ValueContainerType; +import org.icatproject.ids.exceptions.InternalException; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.core.Response; + +/** + * This class provides a container vor carrying different types of values + */ +public class ValueContainer { + + private Object value; + private ValueContainerType type; + + private ValueContainer(Object value, ValueContainerType type) { + this.value = value; + this.type = type; + } + + /** + * checks if the type of the contained value is the same as the typeToCheck + * @param typeToCheck the type to be checked if the contained value is of + * @throws InternalException if the types don't match an exception is thrown + */ + private void checkType(ValueContainerType typeToCheck) throws InternalException { + if(this.type != typeToCheck) throw new 
InternalException("This ValueContainer ist not of the needed type " + typeToCheck + " its type is " + this.type + "."); + } + + public static ValueContainer getInvalid() { + return new ValueContainer(); + } + + private ValueContainer() { + this(null, ValueContainerType.INVALID); + } + + /** + * Creates a ValueContainer of type int + * @param value the value contained by the container + */ + public ValueContainer(int value) { + this(value, ValueContainerType.INT); + } + + /** + * Creates a ValueContainer of type String + * @param value the value contained by the container + */ + public ValueContainer(String value) { + this(value, ValueContainerType.STRING); + } + + /** + * Creates a ValueContainer of type boolean + * @param value the value contained by the container + */ + public ValueContainer(boolean value) { + this(value, ValueContainerType.BOOL); + } + + /** + * Creates a ValueContainer of type Request + * @param value the value contained by the container + */ + public ValueContainer(HttpServletRequest value) { + this(value, ValueContainerType.REQUEST); + } + + /** + * Creates a ValueContainer of type Response + * @param value the value contained by the container + */ + public ValueContainer(Response value) { + this(value, ValueContainerType.RESPONSE); + } + + /** + * Informs about the type of the contained value + * @return + */ + public ValueContainerType getType() { + return this.type; + } + + /** + * Tries to return the value of the type int. + * @return + * @throws InternalException if the container has another type an exception will be thrown + */ + public int getInt() throws InternalException { + this.checkType(ValueContainerType.INT); + return (int) this.value; + } + + /** + * Tries to return the value of the type boolean. 
+ * @return + * @throws InternalException if the container has another type an exception will be thrown + */ + public boolean getBool() throws InternalException { + this.checkType(ValueContainerType.BOOL); + return (boolean) this.value; + } + + /** + * Tries to return the value of the type String. + * @return + * @throws InternalException if the container has another type an exception will be thrown + */ + public String getString() throws InternalException { + this.checkType(ValueContainerType.STRING); + return (String) this.value; + } + + /** + * Tries to return the value of the type Request. + * @return + * @throws InternalException if the container has another type an exception will be thrown + */ + public HttpServletRequest getRequest() throws InternalException { + this.checkType(ValueContainerType.REQUEST); + return (HttpServletRequest) this.value; + } + + /** + * Tries to return the value of the type Response. + * @return + * @throws InternalException if the container has another type an exception will be thrown + */ + public Response getResponse() throws InternalException { + this.checkType(ValueContainerType.RESPONSE); + return (Response) this.value; + } + + + + +} \ No newline at end of file diff --git a/src/test/java/org/icatproject/ids/PreparePackingTest.java b/src/test/java/org/icatproject/ids/PreparePackingTest.java index e35929bc..e92a6bf8 100644 --- a/src/test/java/org/icatproject/ids/PreparePackingTest.java +++ b/src/test/java/org/icatproject/ids/PreparePackingTest.java @@ -17,8 +17,8 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import org.icatproject.ids.v3.model.DataFileInfo; -import org.icatproject.ids.v3.model.DataSetInfo; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; import org.junit.Test; public class PreparePackingTest { From 8d97e271bfe45478f91dde4aa075c90a1bdea4d2 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 13 Feb 2024 09:54:26 
+0100 Subject: [PATCH 04/92] small changes --- .../java/org/icatproject/ids/IdsService.java | 30 +++++++++++++++---- .../ids/v3/handlers/GetDataHandler.java | 6 ++-- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 93c53cbd..5bfa9109 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -66,12 +66,9 @@ public class IdsService { @EJB private IcatReader reader; - private RequestHandlerServiceBase requestHandler; + private RequestHandlerServiceBase requestHandler = null; - - public IdsService() { - this.requestHandler = new RequestHandlerServiceBase(); - } + private Pattern rangeRe; /** * Archive data specified by the investigationIds, datasetIds and @@ -207,6 +204,27 @@ public Response getData(@Context HttpServletRequest request, @QueryParam("prepar parameters.put( "Range", new ValueContainer(range) ); return this.requestHandler.handle(RequestType.GETDATA, parameters).getResponse(); + + // Response response = null; + + // long offset = 0; + // if (range != null) { + + // Matcher m = rangeRe.matcher(range); + // if (!m.matches()) { + // throw new BadRequestException("The range must match " + rangeRe.pattern()); + // } + // offset = Long.parseLong(m.group(1)); + // logger.debug("Range " + range + " -> offset " + offset); + // } + + // if (preparedId != null) { + // response = idsBean.getData(preparedId, outname, offset, request.getRemoteAddr()); + // } else { + // response = idsBean.getData(sessionId, investigationIds, datasetIds, datafileIds, compress, zip, outname, + // offset, request.getRemoteAddr()); + // } + // return response; } /** @@ -351,6 +369,8 @@ public String getStatus(@Context HttpServletRequest request, @QueryParam("prepar @PostConstruct private void init() { logger.info("creating IdsService"); + this.rangeRe = Pattern.compile("bytes=(\\d+)-"); + this.requestHandler 
= new RequestHandlerServiceBase(); this.requestHandler.init(this.transmitter, this.lockManager, this.fsm, this.reader); logger.info("created IdsService"); } diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index e45576ef..9eafbbe8 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -7,7 +7,6 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Map; @@ -20,7 +19,6 @@ import org.icatproject.ids.DataSelection; import org.icatproject.ids.Prepared; import org.icatproject.ids.DataSelection.Returns; -import org.icatproject.ids.LockManager; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.LockManager.LockType; import org.icatproject.ids.StorageUnit; @@ -48,7 +46,7 @@ public class GetDataHandler extends RequestHandlerBase { private static AtomicLong atomicLong = new AtomicLong(); public GetDataHandler() { - super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET} ); + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null} ); } public void init() { @@ -73,7 +71,7 @@ public ValueContainer handle(HashMap parameters) throws } var preparedId = parameters.get("preparedId"); - if (preparedId != null) { + if (preparedId.getString() != null) { response = this.getData(preparedId.getString(), parameters.getOrDefault("outname", ValueContainer.getInvalid()).getString(), offset, From 9d75e1dd98063e6bc07f4b21473fd2f0c592b9d4 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Wed, 14 Feb 2024 10:28:28 +0100 Subject: [PATCH 05/92] Fix for GetDataHandler --- .../org/icatproject/ids/v3/RequestHandlerServiceBase.java | 2 ++ .../java/org/icatproject/ids/v3/handlers/GetDataHandler.java | 5 ++++- 
.../org/icatproject/ids/v3/handlers/RequestHandlerBase.java | 4 ++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java index 06e072ce..d38e61bc 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java @@ -113,9 +113,11 @@ public void init(Transmitter transmitter, LockManager lockManager , FiniteStateM // logSet = propertyHandler.getLogSet(); + logger.info("Initializing " + this.handlers.size() + " RequestHandlers..."); for(RequestHandlerBase handler : this.handlers.values()) { handler.init(); } + logger.info("RequestHandlers initialized"); inited = true; diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index 9eafbbe8..b476071a 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -49,8 +49,11 @@ public GetDataHandler() { super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null} ); } - public void init() { + public void init() throws InternalException { + logger.info("Initializing GetDataHandler..."); + super.init(); this.rangeRe = Pattern.compile("bytes=(\\d+)-"); + logger.info("GetDataHandler initialized"); } @Override diff --git a/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java index 497fd990..05b48489 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java @@ -68,6 +68,8 @@ public boolean supportsStorageUnit(StorageUnit neededStorageUnit) { public void init() throws InternalException { + //logger.info("Initialize 
RequestHandlerBase..."); + var serviceProvider = ServiceProvider.getInstance(); var propertyHandler = serviceProvider.getPropertyHandler(); this.preparedDir = propertyHandler.getCacheDir().resolve("prepared"); @@ -76,6 +78,8 @@ public void init() throws InternalException { var archiveStorage = propertyHandler.getArchiveStorage(); this.twoLevel = archiveStorage != null; + + //logger.info("RequestHandlerBase initialized"); } public abstract ValueContainer handle(HashMap parameters) throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException; From ec3deeb161dd78314b9a922e247bf236fd0901ae Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Wed, 14 Feb 2024 13:03:42 +0100 Subject: [PATCH 06/92] Another fix fpr GetDataHandler --- .../java/org/icatproject/ids/IdsService.java | 23 +------------------ .../ids/v3/handlers/GetDataHandler.java | 2 +- 2 files changed, 2 insertions(+), 23 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 5bfa9109..ef17979e 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -201,30 +201,9 @@ public Response getData(@Context HttpServletRequest request, @QueryParam("prepar parameters.put( "compress", new ValueContainer(compress) ); parameters.put( "zip", new ValueContainer(zip) ); parameters.put( "outname", new ValueContainer(outname) ); - parameters.put( "Range", new ValueContainer(range) ); + parameters.put( "range", new ValueContainer(range) ); return this.requestHandler.handle(RequestType.GETDATA, parameters).getResponse(); - - // Response response = null; - - // long offset = 0; - // if (range != null) { - - // Matcher m = rangeRe.matcher(range); - // if (!m.matches()) { - // throw new BadRequestException("The range must match " + rangeRe.pattern()); - // } - // offset = Long.parseLong(m.group(1)); - // logger.debug("Range " + 
range + " -> offset " + offset); - // } - - // if (preparedId != null) { - // response = idsBean.getData(preparedId, outname, offset, request.getRemoteAddr()); - // } else { - // response = idsBean.getData(sessionId, investigationIds, datasetIds, datafileIds, compress, zip, outname, - // offset, request.getRemoteAddr()); - // } - // return response; } /** diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index b476071a..bfab290c 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -62,7 +62,7 @@ public ValueContainer handle(HashMap parameters) throws long offset = 0; var range = parameters.get("range"); - if ( range != null) { + if ( range != null && range.getString() != null) { var rangeValue = range.getString(); Matcher m = rangeRe.matcher(rangeValue); From c229b128e0b5cf05ec18396ed4609a21dc18cbf2 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Wed, 14 Feb 2024 16:55:45 +0100 Subject: [PATCH 07/92] Moving Methods to DataSelection and FileInfoBase classes --- .../org/icatproject/ids/DataSelection.java | 37 +++ .../java/org/icatproject/ids/IdsBean.java | 296 +----------------- .../java/org/icatproject/ids/IdsService.java | 6 - .../java/org/icatproject/ids/LockManager.java | 1 - .../ids/v3/RequestHandlerServiceBase.java | 3 - .../icatproject/ids/v3/ServiceProvider.java | 1 - .../ids/v3/handlers/GetDataHandler.java | 6 +- .../ids/v3/handlers/RequestHandlerBase.java | 57 ---- .../ids/v3/models/DataFileInfo.java | 15 + .../ids/v3/models/DataInfoBase.java | 4 - .../ids/v3/models/DataSetInfo.java | 17 + .../util/client/TestingClient.java | 2 - 12 files changed, 86 insertions(+), 359 deletions(-) diff --git a/src/main/java/org/icatproject/ids/DataSelection.java b/src/main/java/org/icatproject/ids/DataSelection.java index 246f7461..72d62e3b 100644 --- 
a/src/main/java/org/icatproject/ids/DataSelection.java +++ b/src/main/java/org/icatproject/ids/DataSelection.java @@ -24,9 +24,11 @@ import org.icatproject.icat.client.IcatException; import org.icatproject.icat.client.Session; import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InsufficientPrivilegesException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; @@ -92,6 +94,12 @@ public DataSelection(PropertyHandler propertyHandler, IcatReader icatReader, Str resolveDatasetIds(); } + public DataSelection(Map dsInfos, Set dfInfos, Set emptyDatasets) { + this.dfInfos = dfInfos; + this.emptyDatasets = emptyDatasets; + this.dsInfos = dsInfos; + } + /** * Checks to see if the investigation, dataset or datafile id list is a * valid comma separated list of longs. No spaces or leading 0's. Also @@ -316,4 +324,33 @@ public Set getEmptyDatasets() { return emptyDatasets; } + //maybo should be moved somewhere else? To DataSelection? 
+ public void checkOnline() + throws InternalException, DataNotOnlineException { + StorageUnit storageUnit = ServiceProvider.getInstance().getPropertyHandler().getStorageUnit(); + if (storageUnit == StorageUnit.DATASET) { + boolean maybeOffline = false; + for (DataSetInfo dsInfo : dsInfos.values()) { + if (dsInfo.restoreIfOffline(emptyDatasets)) { + maybeOffline = true; + } + } + if (maybeOffline) { + throw new DataNotOnlineException( + "Before putting, getting or deleting a datafile, its dataset has to be restored, restoration requested automatically"); + } + } else if (storageUnit == StorageUnit.DATAFILE) { + boolean maybeOffline = false; + for (DataFileInfo dfInfo : dfInfos) { + if (dfInfo.restoreIfOffline()) { + maybeOffline = true; + } + } + if (maybeOffline) { + throw new DataNotOnlineException( + "Before getting a datafile, it must be restored, restoration requested automatically"); + } + } + } + } diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 257ace33..95f5c7fd 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -17,6 +17,7 @@ import java.util.Collections; import java.util.Date; import java.util.GregorianCalendar; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -75,7 +76,6 @@ import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; @@ -84,7 +84,6 @@ import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; import org.icatproject.utils.IcatSecurity; -import org.icatproject.utils.ShellCommand; @Stateless public class 
IdsBean { @@ -103,7 +102,7 @@ public RunPrepDsCheck(Collection toCheck, Set emptyDatasets) public Void call() throws Exception { for (DataSetInfo dsInfo : toCheck) { fsm.checkFailure(dsInfo.getId()); - restoreIfOffline(dsInfo, emptyDatasets); + dsInfo.restoreIfOffline(emptyDatasets); } return null; } @@ -122,7 +121,7 @@ public RunPrepDfCheck(SortedSet toCheck) { public Void call() throws Exception { for (DataFileInfo dfInfo : toCheck) { fsm.checkFailure(dfInfo.getId()); - restoreIfOffline(dfInfo); + dfInfo.restoreIfOffline(); } return null; } @@ -140,7 +139,7 @@ public RestoreDfTask(Set dfInfos) { @Override public Void call() throws Exception { for (DataFileInfo dfInfo : dfInfos) { - restoreIfOffline(dfInfo); + dfInfo.restoreIfOffline(); } return null; } @@ -159,7 +158,7 @@ public RestoreDsTask(Collection dsInfos, Set emptyDs) { @Override public Void call() throws Exception { for (DataSetInfo dsInfo : dsInfos) { - restoreIfOffline(dsInfo, emptyDs); + dsInfo.restoreIfOffline(emptyDs); } return null; } @@ -452,8 +451,6 @@ public static void validateUUID(String thing, String id) throws BadRequestExcept throw new BadRequestException("The " + thing + " parameter '" + id + "' is not a valid UUID"); } - private static AtomicLong atomicLong = new AtomicLong(); - @EJB Transmitter transmitter; @@ -494,8 +491,6 @@ public static void validateUUID(String thing, String id) throws BadRequestExcept private ZipMapperInterface zipMapper; - private int maxIdsInQuery; - private boolean twoLevel; private Set logSet; @@ -581,68 +576,6 @@ public void archive(String sessionId, String investigationIds, String datasetIds } } - private void checkDatafilesPresent(Set dfInfos) - throws NotFoundException, InternalException { - /* Check that datafiles have not been deleted before locking */ - int n = 0; - StringBuffer sb = new StringBuffer("SELECT COUNT(df) from Datafile df WHERE (df.id in ("); - for (DataFileInfo dfInfo : dfInfos) { - if (n != 0) { - sb.append(','); - } - 
sb.append(dfInfo.getId()); - if (++n == maxIdsInQuery) { - try { - if (((Long) reader.search(sb.append("))").toString()).get(0)).intValue() != n) { - throw new NotFoundException("One of the data files requested has been deleted"); - } - n = 0; - sb = new StringBuffer("SELECT COUNT(df) from Datafile df WHERE (df.id in ("); - } catch (IcatException_Exception e) { - throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); - } - } - } - if (n != 0) { - try { - if (((Long) reader.search(sb.append("))").toString()).get(0)).intValue() != n) { - throw new NotFoundException("One of the datafiles requested has been deleted"); - } - } catch (IcatException_Exception e) { - throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); - } - } - - } - - private void checkOnline(Collection dsInfos, Set emptyDatasets, - Set dfInfos) - throws InternalException, DataNotOnlineException { - if (storageUnit == StorageUnit.DATASET) { - boolean maybeOffline = false; - for (DataSetInfo dsInfo : dsInfos) { - if (restoreIfOffline(dsInfo, emptyDatasets)) { - maybeOffline = true; - } - } - if (maybeOffline) { - throw new DataNotOnlineException( - "Before putting, getting or deleting a datafile, its dataset has to be restored, restoration requested automatically"); - } - } else if (storageUnit == StorageUnit.DATAFILE) { - boolean maybeOffline = false; - for (DataFileInfo dfInfo : dfInfos) { - if (restoreIfOffline(dfInfo)) { - maybeOffline = true; - } - } - if (maybeOffline) { - throw new DataNotOnlineException( - "Before getting a datafile, it must be restored, restoration requested automatically"); - } - } - } - public void delete(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) throws NotImplementedException, BadRequestException, InsufficientPrivilegesException, InternalException, NotFoundException, DataNotOnlineException { @@ -663,11 +596,10 @@ public void delete(String sessionId, String investigationIds, 
String datasetIds, // Do it Collection dsInfos = dataSelection.getDsInfo().values(); - Set dfInfos = dataSelection.getDfInfo(); try (Lock lock = lockManager.lock(dsInfos, LockType.EXCLUSIVE)) { if (storageUnit == StorageUnit.DATASET) { - checkOnline(dsInfos, dataSelection.getEmptyDatasets(), dfInfos); + dataSelection.checkOnline(); } /* Now delete from ICAT */ @@ -748,183 +680,6 @@ public void delete(String sessionId, String investigationIds, String datasetIds, } } - public Response getData(String preparedId, String outname, final long offset, String ip) throws BadRequestException, - NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { - - long time = System.currentTimeMillis(); - - // Log and validate - logger.info("New webservice request: getData preparedId = '" + preparedId + "' outname = '" + outname - + "' offset = " + offset); - - validateUUID("preparedId", preparedId); - - // Do it - Prepared prepared; - try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { - prepared = unpack(stream); - } catch (NoSuchFileException e) { - throw new NotFoundException("The preparedId " + preparedId + " is not known"); - } catch (IOException e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } - - final boolean zip = prepared.zip; - final boolean compress = prepared.compress; - final Set dfInfos = prepared.dfInfos; - final Map dsInfos = prepared.dsInfos; - Set emptyDatasets = prepared.emptyDatasets; - - Lock lock = null; - try { - lock = lockManager.lock(dsInfos.values(), LockType.SHARED); - - if (twoLevel) { - checkOnline(dsInfos.values(), emptyDatasets, dfInfos); - } - checkDatafilesPresent(dfInfos); - - /* Construct the name to include in the headers */ - String name; - if (outname == null) { - if (zip) { - name = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date()) + ".zip"; - } else { - name = dfInfos.iterator().next().getDfName(); - } - } else { - if (zip) { - String 
ext = outname.substring(outname.lastIndexOf(".") + 1, outname.length()); - if ("zip".equals(ext)) { - name = outname; - } else { - name = outname + ".zip"; - } - } else { - name = outname; - } - } - - Long transferId = null; - if (logSet.contains(CallType.READ)) { - transferId = atomicLong.getAndIncrement(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("transferId", transferId); - gen.write("preparedId", preparedId); - gen.writeEnd(); - } - transmitter.processMessage("getDataStart", ip, baos.toString(), time); - } - - return Response.status(offset == 0 ? HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_PARTIAL) - .entity(new SO(dsInfos, dfInfos, offset, zip, compress, lock, transferId, ip, time)) - .header("Content-Disposition", "attachment; filename=\"" + name + "\"").header("Accept-Ranges", "bytes") - .build(); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock, getData failed"); - throw new DataNotOnlineException("Data is busy"); - } catch (IOException e) { - if (lock != null) { - lock.release(); - } - logger.error("I/O error " + e.getMessage()); - throw new InternalException(e.getClass() + " " + e.getMessage()); - } catch (IdsException e) { - lock.release(); - throw e; - } - } - - public Response getData(String sessionId, String investigationIds, String datasetIds, String datafileIds, - final boolean compress, boolean zip, String outname, final long offset, String ip) - throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, - DataNotOnlineException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info(String.format("New webservice request: getData investigationIds=%s, datasetIds=%s, datafileIds=%s", - investigationIds, datasetIds, datafileIds)); - - validateUUID("sessionId", sessionId); - - final DataSelection dataSelection = new 
DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); - - // Do it - Map dsInfos = dataSelection.getDsInfo(); - Set dfInfos = dataSelection.getDfInfo(); - - Lock lock = null; - try { - lock = lockManager.lock(dsInfos.values(), LockType.SHARED); - - if (twoLevel) { - checkOnline(dsInfos.values(), dataSelection.getEmptyDatasets(), dfInfos); - } - checkDatafilesPresent(dfInfos); - - final boolean finalZip = zip ? true : dataSelection.mustZip(); - - /* Construct the name to include in the headers */ - String name; - if (outname == null) { - if (finalZip) { - name = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date()) + ".zip"; - } else { - name = dataSelection.getDfInfo().iterator().next().getDfName(); - } - } else { - if (finalZip) { - String ext = outname.substring(outname.lastIndexOf(".") + 1, outname.length()); - if ("zip".equals(ext)) { - name = outname; - } else { - name = outname + ".zip"; - } - } else { - name = outname; - } - } - - Long transferId = null; - if (logSet.contains(CallType.READ)) { - try { - transferId = atomicLong.getAndIncrement(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("transferId", transferId); - gen.write("userName", icat.getUserName(sessionId)); - addIds(gen, investigationIds, datasetIds, datafileIds); - gen.writeEnd(); - } - transmitter.processMessage("getDataStart", ip, baos.toString(), start); - } catch (IcatException_Exception e) { - logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); - } - } - - return Response.status(offset == 0 ? 
HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_PARTIAL) - .entity(new SO(dataSelection.getDsInfo(), dataSelection.getDfInfo(), offset, finalZip, compress, lock, - transferId, ip, start)) - .header("Content-Disposition", "attachment; filename=\"" + name + "\"").header("Accept-Ranges", "bytes") - .build(); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock, getData failed"); - throw new DataNotOnlineException("Data is busy"); - } catch (IOException e) { - if (lock != null) { - lock.release(); - } - logger.error("I/O error " + e.getMessage()); - throw new InternalException(e.getClass() + " " + e.getMessage()); - } catch (IdsException e) { - lock.release(); - throw e; - } - } - public String getDatafileIds(String preparedId, String ip) throws BadRequestException, InternalException, NotFoundException { @@ -1461,8 +1216,6 @@ private void init() { } } - maxIdsInQuery = propertyHandler.getMaxIdsInQuery(); - threadPool = Executors.newCachedThreadPool(); logSet = propertyHandler.getLogSet(); @@ -1530,7 +1283,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check online status of {} entries", toCheck.size()); for (DataSetInfo dsInfo : toCheck) { fsm.checkFailure(dsInfo.getId()); - if (restoreIfOffline(dsInfo, preparedJson.emptyDatasets)) { + if (dsInfo.restoreIfOffline(preparedJson.emptyDatasets)) { prepared = false; status.fromDsElement = dsInfo.getId(); toCheck = preparedJson.dsInfos.tailMap(status.fromDsElement).values(); @@ -1545,7 +1298,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check finally online status of {} entries", toCheck.size()); for (DataSetInfo dsInfo : toCheck) { fsm.checkFailure(dsInfo.getId()); - if (restoreIfOffline(dsInfo, preparedJson.emptyDatasets)) { + if (dsInfo.restoreIfOffline(preparedJson.emptyDatasets)) { prepared = false; } } @@ -1556,7 +1309,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check online status of {} 
entries", toCheck.size()); for (DataFileInfo dfInfo : toCheck) { fsm.checkFailure(dfInfo.getId()); - if (restoreIfOffline(dfInfo)) { + if (dfInfo.restoreIfOffline()) { prepared = false; status.fromDfElement = dfInfo; toCheck = preparedJson.dfInfos.tailSet(status.fromDfElement); @@ -1571,7 +1324,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check finally online status of {} entries", toCheck.size()); for (DataFileInfo dfInfo : toCheck) { fsm.checkFailure(dfInfo.getId()); - if (restoreIfOffline(dfInfo)) { + if (dfInfo.restoreIfOffline()) { prepared = false; } } @@ -1776,9 +1529,10 @@ public Response put(InputStream body, String sessionId, String name, String data } throw new InternalException(type + " " + e.getMessage()); } - Set dsInfos = new HashSet<>(); - dsInfos.add(dsInfo); - checkOnline(dsInfos, emptyDatasets, dfInfos); + var dsInfos = new HashMap(); + dsInfos.put(dsInfo.getId(), dsInfo); + DataSelection dataSelection = new DataSelection(dsInfos, dfInfos, emptyDatasets); + dataSelection.checkOnline(); } CRC32 crc = new CRC32(); @@ -2106,28 +1860,6 @@ public void restore(String sessionId, String investigationIds, String datasetIds } } - private boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { - boolean maybeOffline = false; - if (fsm.getDfMaybeOffline().contains(dfInfo)) { - maybeOffline = true; - } else if (!mainStorage.exists(dfInfo.getDfLocation())) { - fsm.queue(dfInfo, DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } - - private boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { - boolean maybeOffline = false; - if (fsm.getDsMaybeOffline().contains(dsInfo)) { - maybeOffline = true; - } else if (!emptyDatasets.contains(dsInfo.getId()) && !mainStorage.exists(dsInfo)) { - fsm.queue(dsInfo, DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } - public void write(String sessionId, String investigationIds, String 
datasetIds, String datafileIds, String ip) throws NotImplementedException, BadRequestException, InsufficientPrivilegesException, InternalException, NotFoundException, DataNotOnlineException { diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index ef17979e..69f646bf 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -5,8 +5,6 @@ import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.HashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import jakarta.annotation.PostConstruct; import jakarta.annotation.PreDestroy; @@ -41,7 +39,6 @@ import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.RequestHandlerServiceBase; -import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.ValueContainer; @@ -68,8 +65,6 @@ public class IdsService { private RequestHandlerServiceBase requestHandler = null; - private Pattern rangeRe; - /** * Archive data specified by the investigationIds, datasetIds and * datafileIds specified along with a sessionId. 
If two level storage is not @@ -348,7 +343,6 @@ public String getStatus(@Context HttpServletRequest request, @QueryParam("prepar @PostConstruct private void init() { logger.info("creating IdsService"); - this.rangeRe = Pattern.compile("bytes=(\\d+)-"); this.requestHandler = new RequestHandlerServiceBase(); this.requestHandler.init(this.transmitter, this.lockManager, this.fsm, this.reader); logger.info("created IdsService"); diff --git a/src/main/java/org/icatproject/ids/LockManager.java b/src/main/java/org/icatproject/ids/LockManager.java index 972e6e98..ff168ad4 100644 --- a/src/main/java/org/icatproject/ids/LockManager.java +++ b/src/main/java/org/icatproject/ids/LockManager.java @@ -13,7 +13,6 @@ import org.slf4j.LoggerFactory; import org.icatproject.ids.plugin.AlreadyLockedException; -import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.models.DataSetInfo; diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java index d38e61bc..1e071dd2 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java @@ -3,9 +3,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; -import java.util.concurrent.Executors; - -import javax.xml.datatype.DatatypeFactory; import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.IcatReader; diff --git a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java index 306e9db5..ecc56429 100644 --- a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java +++ b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java @@ -17,7 +17,6 @@ public class ServiceProvider { private FiniteStateMachine fsm; private LockManager lockManager; private IcatReader icatReader; - private ICAT 
icat; private ServiceProvider(Transmitter transmitter, FiniteStateMachine fsm, LockManager lockManager, IcatReader reader) { this.transmitter = transmitter; diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index bfab290c..530ad212 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -117,11 +117,11 @@ private Response getData(String preparedId, String outname, final long offset, S throw new InternalException(e.getClass() + " " + e.getMessage()); } + DataSelection dataSelection = new DataSelection(prepared.dsInfos, prepared.dfInfos, prepared.emptyDatasets); final boolean zip = prepared.zip; final boolean compress = prepared.compress; final Set dfInfos = prepared.dfInfos; final Map dsInfos = prepared.dsInfos; - Set emptyDatasets = prepared.emptyDatasets; Lock lock = null; try { @@ -129,7 +129,7 @@ private Response getData(String preparedId, String outname, final long offset, S lock = serviceProvider.getLockManager().lock(dsInfos.values(), LockType.SHARED); if (twoLevel) { - checkOnline(dsInfos.values(), emptyDatasets, dfInfos); + dataSelection.checkOnline(); } checkDatafilesPresent(dfInfos); @@ -214,7 +214,7 @@ private Response getData(String sessionId, String investigationIds, String datas lock = serviceProvider.getLockManager().lock(dsInfos.values(), LockType.SHARED); if (twoLevel) { - checkOnline(dsInfos.values(), dataSelection.getEmptyDatasets(), dfInfos); + dataSelection.checkOnline(); } checkDatafilesPresent(dfInfos); diff --git a/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java index 05b48489..8dcb88b1 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java @@ -1,11 +1,8 
@@ package org.icatproject.ids.v3.handlers; import java.io.InputStream; -import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -31,7 +28,6 @@ import jakarta.json.stream.JsonGenerator; import org.icatproject.ids.DataSelection; -import org.icatproject.ids.DeferredOp; import org.icatproject.ids.Prepared; import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.BadRequestException; @@ -130,59 +126,6 @@ protected static Prepared unpack(InputStream stream) throws InternalException { return prepared; } - //maybo should be moved somewhere else? To DataSelection? - protected void checkOnline(Collection dsInfos, Set emptyDatasets, - Set dfInfos) - throws InternalException, DataNotOnlineException { - if (storageUnit == StorageUnit.DATASET) { - boolean maybeOffline = false; - for (DataSetInfo dsInfo : dsInfos) { - if (restoreIfOffline(dsInfo, emptyDatasets)) { - maybeOffline = true; - } - } - if (maybeOffline) { - throw new DataNotOnlineException( - "Before putting, getting or deleting a datafile, its dataset has to be restored, restoration requested automatically"); - } - } else if (storageUnit == StorageUnit.DATAFILE) { - boolean maybeOffline = false; - for (DataFileInfo dfInfo : dfInfos) { - if (restoreIfOffline(dfInfo)) { - maybeOffline = true; - } - } - if (maybeOffline) { - throw new DataNotOnlineException( - "Before getting a datafile, it must be restored, restoration requested automatically"); - } - } - } - - private boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { - boolean maybeOffline = false; - var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDfMaybeOffline().contains(dfInfo)) { - maybeOffline = true; - } else if (!serviceProvider.getMainStorage().exists(dfInfo.getDfLocation())) { - serviceProvider.getFsm().queue(dfInfo, 
DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } - - private boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { - boolean maybeOffline = false; - var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDsMaybeOffline().contains(dsInfo)) { - maybeOffline = true; - } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists(dsInfo)) { - serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } - protected void addIds(JsonGenerator gen, String investigationIds, String datasetIds, String datafileIds) throws BadRequestException { if (investigationIds != null) { diff --git a/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java b/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java index 924d220e..a5f47e88 100644 --- a/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java @@ -1,5 +1,8 @@ package org.icatproject.ids.v3.models; +import org.icatproject.ids.DeferredOp; +import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.DfInfo; +import org.icatproject.ids.v3.ServiceProvider; /** * Contains information about a Datafile. 
Replaces DsInfo in v3 @@ -47,6 +50,18 @@ public int compareTo(DataFileInfo o) { return 0; } + public boolean restoreIfOffline() throws InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDfMaybeOffline().contains(this)) { + maybeOffline = true; + } else if (!serviceProvider.getMainStorage().exists(this.getDfLocation())) { + serviceProvider.getFsm().queue(this, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + // implementing DfInfo @Override public Long getDfId() { return this.getId(); } diff --git a/src/main/java/org/icatproject/ids/v3/models/DataInfoBase.java b/src/main/java/org/icatproject/ids/v3/models/DataInfoBase.java index 7037061a..c74e5505 100644 --- a/src/main/java/org/icatproject/ids/v3/models/DataInfoBase.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataInfoBase.java @@ -1,6 +1,5 @@ package org.icatproject.ids.v3.models; - /** * A Base class for Data objct types. 
like Datasets or Datafiles */ @@ -16,7 +15,6 @@ protected DataInfoBase(long id, String name, String location){ this.location = location; } - @Override public abstract String toString(); public Long getId() { @@ -31,12 +29,10 @@ public String getLocation() { return location; } - @Override public int hashCode() { return (int) (this.id ^ (this.id >>> 32)); } - @Override public boolean equals(Object obj) { if (obj == this) { return true; diff --git a/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java index e89ddd57..84485957 100644 --- a/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java @@ -1,10 +1,15 @@ package org.icatproject.ids.v3.models; +import java.util.Set; + import org.icatproject.Dataset; import org.icatproject.Facility; import org.icatproject.Investigation; +import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.DsInfo; +import org.icatproject.ids.v3.ServiceProvider; /** * Contains information about a Dataset. Replaces DsInfo in v3. 
@@ -58,6 +63,18 @@ public String toString() { + this.name + ")"; } + public boolean restoreIfOffline(Set emptyDatasets) throws InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDsMaybeOffline().contains(this)) { + maybeOffline = true; + } else if (!emptyDatasets.contains(this.getId()) && !serviceProvider.getMainStorage().exists(this)) { + serviceProvider.getFsm().queue(this, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + public Long getFacilityId() { return facilityId; } diff --git a/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java b/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java index bc6469a3..fa5d6d28 100644 --- a/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java +++ b/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java @@ -7,8 +7,6 @@ import java.net.URI; import java.net.URISyntaxException; import java.net.URL; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; From 396e92b32ab320b776b9dc89b999b6b99c7876f1 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Thu, 15 Feb 2024 14:19:26 +0100 Subject: [PATCH 08/92] New DataSelection depending on StorageUnit for v3 --- .../ids/v3/DataSelectionFactory.java | 344 ++++++++++++++++++ .../DataSelectionForSingleLevelStorage.java | 25 ++ .../DataSelectionForStorageUnitDatafile.java | 35 ++ .../DataSelectionForStorageUnitDataset.java | 35 ++ .../ids/v3/DataSelectionV3Base.java | 88 +++++ .../ids/v3/handlers/GetDataHandler.java | 16 +- 6 files changed, 536 insertions(+), 7 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java create mode 100644 src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java create mode 100644 
src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java create mode 100644 src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java create mode 100644 src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java new file mode 100644 index 00000000..fcec1012 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -0,0 +1,344 @@ +package org.icatproject.ids.v3; + +import java.io.ByteArrayInputStream; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.icatproject.Datafile; +import org.icatproject.Dataset; +import org.icatproject.ICAT; +import org.icatproject.IcatExceptionType; +import org.icatproject.IcatException_Exception; +import org.icatproject.icat.client.IcatException; +import org.icatproject.icat.client.Session; +import org.icatproject.ids.IcatReader; +import org.icatproject.ids.IdsBean; +import org.icatproject.ids.PropertyHandler; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonValue; + + + +public class DataSelectionFactory { + + private final static Logger logger = LoggerFactory.getLogger(DataSelectionFactory.class); + + private static DataSelectionFactory instance = null; + + private PropertyHandler propertyHandler; + 
private ICAT icat; + private IcatReader icatReader; + private org.icatproject.icat.client.ICAT restIcat; + private int maxEntities; + + public enum Returns { + DATASETS, DATASETS_AND_DATAFILES, DATAFILES + } + + public static DataSelectionFactory getInstance() throws InternalException { + if (instance == null) { + instance = new DataSelectionFactory(); + } + return instance; + } + + public static DataSelectionV3Base get(String userSessionId, + String investigationIds, String datasetIds, String datafileIds, Returns returns) throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException { + + return DataSelectionFactory.getInstance().getSelection(userSessionId, investigationIds, datasetIds, datafileIds, returns); + } + + + + public static DataSelectionV3Base get(Map dsInfos, Set dfInfos, Set emptyDatasets) throws InternalException { + List dsids = new ArrayList(dsInfos.keySet()); + List dfids = new ArrayList(); + for(DataFileInfo dfInfo: dfInfos) { + dfids.add(dfInfo.getId()); + } + return DataSelectionFactory.getInstance().createSelection(dsInfos, dfInfos, emptyDatasets, new ArrayList(), dsids, dfids); + } + + private DataSelectionFactory() throws InternalException + { + logger.info("### Constructing..."); + this.propertyHandler = ServiceProvider.getInstance().getPropertyHandler(); + this.icat = propertyHandler.getIcatService(); + this.icatReader = ServiceProvider.getInstance().getIcatReader(); + this.restIcat = propertyHandler.getRestIcat(); + this.maxEntities = propertyHandler.getMaxEntities(); + logger.info("### Constructing finished"); + } + + private DataSelectionV3Base createSelection(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids) throws InternalException { + + StorageUnit storageUnit = this.propertyHandler.getStorageUnit(); + + if(storageUnit == null ) + return new DataSelectionForSingleLevelStorage(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + + else if (storageUnit == 
StorageUnit.DATAFILE) + return new DataSelectionForStorageUnitDatafile(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + + else if(storageUnit == StorageUnit.DATASET) + return new DataSelectionForStorageUnitDataset(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + + else throw new InternalException("StorageUnit " + storageUnit + " unknown. Maybe you forgot to handle a new StorageUnit here?"); + + } + + private DataSelectionV3Base getSelection( String userSessionId, + String investigationIds, String datasetIds, String datafileIds, Returns returns) throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException { + + List dfids = getValidIds("datafileIds", datafileIds); + List dsids = getValidIds("datasetIds", datasetIds); + List invids = getValidIds("investigationIds", investigationIds); + boolean dfWanted = returns == Returns.DATASETS_AND_DATAFILES || returns == Returns.DATAFILES; + boolean dsWanted = returns == Returns.DATASETS_AND_DATAFILES || returns == Returns.DATASETS; + + Session userRestSession = restIcat.getSession(userSessionId); + // by default use the user's REST ICAT session + Session restSessionToUse = userRestSession; + + try { + logger.debug("useReaderForPerformance = {}", propertyHandler.getUseReaderForPerformance()); + if (propertyHandler.getUseReaderForPerformance()) { + // if this is set, use a REST session for the reader account where possible + // to improve performance due to the final database queries being simpler + restSessionToUse = restIcat.getSession(this.icatReader.getSessionId()); + } + } catch (IcatException_Exception e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + logger.debug("dfids: {} dsids: {} invids: {}", dfids, dsids, invids); + + return prepareFromIds(dfWanted, dsWanted, dfids, dsids, invids, userSessionId, restSessionToUse, userRestSession); + } + + + + private DataSelectionV3Base prepareFromIds(boolean dfWanted, boolean dsWanted, List dfids, 
List dsids, List invids, String userSessionId, Session restSessionToUse, Session userRestSession) + throws NotFoundException, InsufficientPrivilegesException, InternalException, BadRequestException { + var dsInfos = new HashMap(); + var emptyDatasets = new HashSet(); + var dfInfos = new HashSet(); + if (dfWanted) { + dfInfos = new HashSet<>(); + } + + try { + + for (Long dfid : dfids) { + List dss = icat.search(userSessionId, + "SELECT ds FROM Dataset ds JOIN ds.datafiles df WHERE df.id = " + dfid + + " AND df.location IS NOT NULL INCLUDE ds.investigation.facility"); + if (dss.size() == 1) { + Dataset ds = (Dataset) dss.get(0); + long dsid = ds.getId(); + dsInfos.put(dsid, new DataSetInfo(ds)); + if (dfWanted) { + Datafile df = (Datafile) icat.get(userSessionId, "Datafile", dfid); + String location = IdsBean.getLocation(dfid, df.getLocation()); + dfInfos.add( + new DataFileInfo(dfid, df.getName(), location, df.getCreateId(), df.getModId(), dsid)); + } + } else { + // Next line may reveal a permissions problem + icat.get(userSessionId, "Datafile", dfid); + throw new NotFoundException("Datafile " + dfid); + } + } + + for (Long dsid : dsids) { + Dataset ds = (Dataset) icat.get(userSessionId, "Dataset ds INCLUDE ds.investigation.facility", dsid); + dsInfos.put(dsid, new DataSetInfo(ds)); + // dataset access for the user has been checked so the REST session for the + // reader account can be used if the IDS setting to allow this is enabled + String query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = " + + dsid + " AND df.location IS NOT NULL"; + JsonArray result = Json.createReader(new ByteArrayInputStream(restSessionToUse.search(query).getBytes())) + .readArray().getJsonArray(0); + if (result.getJsonNumber(2).longValueExact() == 0) { // Count 0 + emptyDatasets.add(dsid); + } else if (dfWanted) { + manyDfs(dfInfos, dsid, restSessionToUse, result); + } + } + + for (Long invid : invids) { + String query = "SELECT min(ds.id), 
max(ds.id), count(ds.id) FROM Dataset ds WHERE ds.investigation.id = " + + invid; + JsonArray result = Json.createReader(new ByteArrayInputStream(userRestSession.search(query).getBytes())) + .readArray().getJsonArray(0); + manyDss(dsInfos, emptyDatasets, dfInfos, invid, dfWanted, userRestSession, restSessionToUse, result); + + } + + } catch (IcatException_Exception e) { + IcatExceptionType type = e.getFaultInfo().getType(); + if (type == IcatExceptionType.INSUFFICIENT_PRIVILEGES || type == IcatExceptionType.SESSION) { + throw new InsufficientPrivilegesException(e.getMessage()); + } else if (type == IcatExceptionType.NO_SUCH_OBJECT_FOUND) { + throw new NotFoundException(e.getMessage()); + } else { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + } catch (IcatException e) { + org.icatproject.icat.client.IcatException.IcatExceptionType type = e.getType(); + if (type == org.icatproject.icat.client.IcatException.IcatExceptionType.INSUFFICIENT_PRIVILEGES + || type == org.icatproject.icat.client.IcatException.IcatExceptionType.SESSION) { + throw new InsufficientPrivilegesException(e.getMessage()); + } else if (type == org.icatproject.icat.client.IcatException.IcatExceptionType.NO_SUCH_OBJECT_FOUND) { + throw new NotFoundException(e.getMessage()); + } else { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + } + /* + * TODO: don't calculate what is not needed - however this ensures that + * the flag is respected + */ + if (!dsWanted) { + dsInfos = null; + emptyDatasets = null; + } + + return this.createSelection(dsInfos, dfInfos, emptyDatasets, invids, dsids, dfids); + } + + /** + * Checks to see if the investigation, dataset or datafile id list is a + * valid comma separated list of longs. No spaces or leading 0's. Also + * accepts null. 
+ */ + public static List getValidIds(String thing, String idList) throws BadRequestException { + + List result; + if (idList == null) { + result = Collections.emptyList(); + } else { + String[] ids = idList.split("\\s*,\\s*"); + result = new ArrayList<>(ids.length); + for (String id : ids) { + try { + result.add(Long.parseLong(id)); + } catch (NumberFormatException e) { + throw new BadRequestException("The " + thing + " parameter '" + idList + "' is not a valid " + + "string representation of a comma separated list of longs"); + } + } + } + return result; + } + + private void manyDfs(HashSet dfInfos, long dsid, Session restSessionToUse, JsonArray result) + throws IcatException, InsufficientPrivilegesException, InternalException { + // dataset access for the user has been checked so the REST session for the + // reader account can be used if the IDS setting to allow this is enabled + long min = result.getJsonNumber(0).longValueExact(); + long max = result.getJsonNumber(1).longValueExact(); + long count = result.getJsonNumber(2).longValueExact(); + logger.debug("manyDfs min: {} max: {} count: {}", min, max, count); + if (count != 0) { + if (count <= maxEntities) { + String query = "SELECT df.id, df.name, df.location, df.createId, df.modId FROM Datafile df WHERE df.dataset.id = " + + dsid + " AND df.location IS NOT NULL AND df.id BETWEEN " + min + " AND " + max; + result = Json.createReader(new ByteArrayInputStream(restSessionToUse.search(query).getBytes())).readArray(); + for (JsonValue tupV : result) { + JsonArray tup = (JsonArray) tupV; + long dfid = tup.getJsonNumber(0).longValueExact(); + String location = IdsBean.getLocation(dfid, tup.getString(2, null)); + dfInfos.add( + new DataFileInfo(dfid, tup.getString(1), location, tup.getString(3), tup.getString(4), dsid)); + } + } else { + long half = (min + max) / 2; + String query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = " + + dsid + " AND df.location IS NOT NULL AND df.id 
BETWEEN " + min + " AND " + half; + result = Json.createReader(new ByteArrayInputStream(restSessionToUse.search(query).getBytes())).readArray() + .getJsonArray(0); + manyDfs(dfInfos, dsid, restSessionToUse, result); + query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = " + dsid + + " AND df.location IS NOT NULL AND df.id BETWEEN " + (half + 1) + " AND " + max; + result = Json.createReader(new ByteArrayInputStream(restSessionToUse.search(query).getBytes())).readArray() + .getJsonArray(0); + manyDfs(dfInfos, dsid, restSessionToUse, result); + } + } + } + + private void manyDss(HashMap dsInfos, HashSet emptyDatasets, HashSet dfInfos, Long invid, boolean dfWanted, Session userRestSession, Session restSessionToUseForDfs, JsonArray result) + throws IcatException, InsufficientPrivilegesException, InternalException { + long min = result.getJsonNumber(0).longValueExact(); + long max = result.getJsonNumber(1).longValueExact(); + long count = result.getJsonNumber(2).longValueExact(); + logger.debug("manyDss min: {} max: {} count: {}", min, max, count); + if (count != 0) { + if (count <= maxEntities) { + String query = "SELECT inv.name, inv.visitId, inv.facility.id, inv.facility.name FROM Investigation inv WHERE inv.id = " + + invid; + result = Json.createReader(new ByteArrayInputStream(userRestSession.search(query).getBytes())).readArray(); + if (result.size() == 0) { + return; + } + result = result.getJsonArray(0); + String invName = result.getString(0); + String visitId = result.getString(1); + long facilityId = result.getJsonNumber(2).longValueExact(); + String facilityName = result.getString(3); + + query = "SELECT ds.id, ds.name, ds.location FROM Dataset ds WHERE ds.investigation.id = " + invid + + " AND ds.id BETWEEN " + min + " AND " + max; + result = Json.createReader(new ByteArrayInputStream(userRestSession.search(query).getBytes())).readArray(); + for (JsonValue tupV : result) { + JsonArray tup = (JsonArray) tupV; + long dsid 
= tup.getJsonNumber(0).longValueExact(); + dsInfos.put(dsid, new DataSetInfo(dsid, tup.getString(1), tup.getString(2, null), invid, invName, + visitId, facilityId, facilityName)); + + query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = " + + dsid + " AND df.location IS NOT NULL"; + result = Json.createReader(new ByteArrayInputStream(userRestSession.search(query).getBytes())) + .readArray().getJsonArray(0); + if (result.getJsonNumber(2).longValueExact() == 0) { + emptyDatasets.add(dsid); + } else if (dfWanted) { + manyDfs(dfInfos, dsid, restSessionToUseForDfs, result); + } + + } + } else { + long half = (min + max) / 2; + String query = "SELECT min(ds.id), max(ds.id), count(ds.id) FROM Dataset ds WHERE ds.investigation.id = " + + invid + " AND ds.id BETWEEN " + min + " AND " + half; + result = Json.createReader(new ByteArrayInputStream(userRestSession.search(query).getBytes())).readArray(); + manyDss(dsInfos, emptyDatasets, dfInfos, invid, dfWanted, userRestSession, restSessionToUseForDfs, result); + query = "SELECT min(ds.id), max(ds.id), count(ds.id) FROM Dataset ds WHERE ds.investigation.id = " + + invid + " AND ds.id BETWEEN " + half + 1 + " AND " + max; + result = Json.createReader(new ByteArrayInputStream(userRestSession.search(query).getBytes())).readArray() + .getJsonArray(0); + manyDss(dsInfos, emptyDatasets, dfInfos, invid, dfWanted, userRestSession, restSessionToUseForDfs, result); + } + } + + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java new file mode 100644 index 00000000..c2bdd8b2 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java @@ -0,0 +1,25 @@ +package org.icatproject.ids.v3; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import 
org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; + +public class DataSelectionForSingleLevelStorage extends DataSelectionV3Base { + + protected DataSelectionForSingleLevelStorage(Map dsInfos, Set dfInfos, + Set emptyDatasets, List invids2, List dsids, List dfids) { + + super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + } + + @Override + public void checkOnline() throws InternalException, DataNotOnlineException { + // nothing to do here for single level storage + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java new file mode 100644 index 00000000..d1997080 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -0,0 +1,35 @@ +package org.icatproject.ids.v3; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; + +public class DataSelectionForStorageUnitDatafile extends DataSelectionV3Base { + + protected DataSelectionForStorageUnitDatafile(Map dsInfos, Set dfInfos, + Set emptyDatasets, List invids2, List dsids, List dfids) { + + super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + } + + @Override + public void checkOnline()throws InternalException, DataNotOnlineException { + + boolean maybeOffline = false; + for (DataFileInfo dfInfo : dfInfos) { + if (dfInfo.restoreIfOffline()) { + maybeOffline = true; + } + } + if (maybeOffline) { + throw new DataNotOnlineException( + "Before getting a datafile, it must be restored, restoration requested 
automatically"); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java new file mode 100644 index 00000000..21fc7697 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -0,0 +1,35 @@ +package org.icatproject.ids.v3; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; + +public class DataSelectionForStorageUnitDataset extends DataSelectionV3Base { + + protected DataSelectionForStorageUnitDataset(Map dsInfos, Set dfInfos, + Set emptyDatasets, List invids2, List dsids, List dfids) { + + super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + } + + @Override + public void checkOnline() throws InternalException, DataNotOnlineException { + + boolean maybeOffline = false; + for (DataSetInfo dsInfo : dsInfos.values()) { + if (dsInfo.restoreIfOffline(emptyDatasets)) { + maybeOffline = true; + } + } + if (maybeOffline) { + throw new DataNotOnlineException( + "Before putting, getting or deleting a datafile, its dataset has to be restored, restoration requested automatically"); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java new file mode 100644 index 00000000..a2b41b4d --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -0,0 +1,88 @@ +package org.icatproject.ids.v3; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.icatproject.ids.exceptions.BadRequestException; +import 
org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataSetInfo; + +public abstract class DataSelectionV3Base { + + protected Map dsInfos; + protected Set dfInfos; + protected Set emptyDatasets; + protected List invids; + protected List dsids; + protected List dfids; + + protected DataSelectionV3Base(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids) { + + this.dsInfos = dsInfos; + this.dfInfos = dfInfos; + this.emptyDatasets = emptyDatasets; + this.invids = invids2; + this.dsids = dsids; + this.dfids = dfids; + } + + + public Map getDsInfo() { + return dsInfos; + } + + + public Set getDfInfo() { + return dfInfos; + } + + + public boolean mustZip() { + return dfids.size() > 1L || !dsids.isEmpty() || !invids.isEmpty() + || (dfids.isEmpty() && dsids.isEmpty() && invids.isEmpty()); + } + + public boolean isSingleDataset() { + return dfids.isEmpty() && dsids.size() == 1 && invids.isEmpty(); + } + + + public Set getEmptyDatasets() { + return emptyDatasets; + } + + + /** + * Checks to see if the investigation, dataset or datafile id list is a + * valid comma separated list of longs. No spaces or leading 0's. Also + * accepts null. 
+ */ + public static List getValidIds(String thing, String idList) throws BadRequestException { + + List result; + if (idList == null) { + result = Collections.emptyList(); + } else { + String[] ids = idList.split("\\s*,\\s*"); + result = new ArrayList<>(ids.length); + for (String id : ids) { + try { + result.add(Long.parseLong(id)); + } catch (NumberFormatException e) { + throw new BadRequestException("The " + thing + " parameter '" + idList + "' is not a valid " + + "string representation of a comma separated list of longs"); + } + } + } + return result; + } + + + public abstract void checkOnline() throws InternalException, DataNotOnlineException; + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index 530ad212..89033aa5 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -7,6 +7,7 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.text.SimpleDateFormat; +import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Map; @@ -16,9 +17,7 @@ import java.util.regex.Pattern; import org.icatproject.IcatException_Exception; -import org.icatproject.ids.DataSelection; import org.icatproject.ids.Prepared; -import org.icatproject.ids.DataSelection.Returns; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.LockManager.LockType; import org.icatproject.ids.StorageUnit; @@ -29,7 +28,10 @@ import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.plugin.AlreadyLockedException; +import org.icatproject.ids.v3.DataSelectionFactory; +import org.icatproject.ids.v3.DataSelectionV3Base; import org.icatproject.ids.v3.ServiceProvider; +import 
org.icatproject.ids.v3.DataSelectionFactory.Returns; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.helper.SO; import org.icatproject.ids.v3.models.DataFileInfo; @@ -45,10 +47,12 @@ public class GetDataHandler extends RequestHandlerBase { private Pattern rangeRe; private static AtomicLong atomicLong = new AtomicLong(); + public GetDataHandler() { super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null} ); } + public void init() throws InternalException { logger.info("Initializing GetDataHandler..."); super.init(); @@ -56,6 +60,7 @@ public void init() throws InternalException { logger.info("GetDataHandler initialized"); } + @Override public ValueContainer handle(HashMap parameters) throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { Response response = null; @@ -95,7 +100,6 @@ public ValueContainer handle(HashMap parameters) throws } - private Response getData(String preparedId, String outname, final long offset, String ip) throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { @@ -117,7 +121,7 @@ private Response getData(String preparedId, String outname, final long offset, S throw new InternalException(e.getClass() + " " + e.getMessage()); } - DataSelection dataSelection = new DataSelection(prepared.dsInfos, prepared.dfInfos, prepared.emptyDatasets); + DataSelectionV3Base dataSelection = DataSelectionFactory.get((Map) prepared.dsInfos, (Set) prepared.dfInfos, (Set) prepared.emptyDatasets); final boolean zip = prepared.zip; final boolean compress = prepared.compress; final Set dfInfos = prepared.dfInfos; @@ -201,9 +205,7 @@ private Response getData(String sessionId, String investigationIds, String datas var serviceProvider = ServiceProvider.getInstance(); - // TODO: change constructor of DataSelection and receive PropertyHandler and IcatReader from ServiceProvider within - final 
DataSelection dataSelection = new DataSelection(serviceProvider.getPropertyHandler(), serviceProvider.getIcatReader(), sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); + final DataSelectionV3Base dataSelection = DataSelectionFactory.get(sessionId, investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); // Do it Map dsInfos = dataSelection.getDsInfo(); From f8a69f1c09582f44ed8c4098932f1e7471940d45 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Thu, 15 Feb 2024 14:41:18 +0100 Subject: [PATCH 09/92] moved restoreIfOffline to DataSelection - SoC --- .../org/icatproject/ids/DataSelection.java | 30 +++++++++++++++++-- .../java/org/icatproject/ids/IdsBean.java | 16 +++++----- .../DataSelectionForStorageUnitDatafile.java | 15 +++++++++- .../DataSelectionForStorageUnitDataset.java | 16 +++++++++- .../ids/v3/models/DataFileInfo.java | 16 +--------- .../ids/v3/models/DataSetInfo.java | 12 -------- 6 files changed, 66 insertions(+), 39 deletions(-) diff --git a/src/main/java/org/icatproject/ids/DataSelection.java b/src/main/java/org/icatproject/ids/DataSelection.java index 72d62e3b..34f19ac1 100644 --- a/src/main/java/org/icatproject/ids/DataSelection.java +++ b/src/main/java/org/icatproject/ids/DataSelection.java @@ -331,7 +331,7 @@ public void checkOnline() if (storageUnit == StorageUnit.DATASET) { boolean maybeOffline = false; for (DataSetInfo dsInfo : dsInfos.values()) { - if (dsInfo.restoreIfOffline(emptyDatasets)) { + if (this.restoreIfOffline(dsInfo, emptyDatasets)) { maybeOffline = true; } } @@ -342,7 +342,7 @@ public void checkOnline() } else if (storageUnit == StorageUnit.DATAFILE) { boolean maybeOffline = false; for (DataFileInfo dfInfo : dfInfos) { - if (dfInfo.restoreIfOffline()) { + if (DataSelection.restoreIfOffline(dfInfo)) { maybeOffline = true; } } @@ -353,4 +353,30 @@ public void checkOnline() } } + + public static boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { + 
boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDfMaybeOffline().contains(dfInfo)) { + maybeOffline = true; + } else if (!serviceProvider.getMainStorage().exists(dfInfo.getDfLocation())) { + serviceProvider.getFsm().queue(dfInfo, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + + + public static boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDsMaybeOffline().contains(dsInfo)) { + maybeOffline = true; + } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists(dsInfo)) { + serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + } diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 95f5c7fd..2458a85e 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -102,7 +102,7 @@ public RunPrepDsCheck(Collection toCheck, Set emptyDatasets) public Void call() throws Exception { for (DataSetInfo dsInfo : toCheck) { fsm.checkFailure(dsInfo.getId()); - dsInfo.restoreIfOffline(emptyDatasets); + DataSelection.restoreIfOffline(dsInfo, emptyDatasets); } return null; } @@ -121,7 +121,7 @@ public RunPrepDfCheck(SortedSet toCheck) { public Void call() throws Exception { for (DataFileInfo dfInfo : toCheck) { fsm.checkFailure(dfInfo.getId()); - dfInfo.restoreIfOffline(); + DataSelection.restoreIfOffline(dfInfo); } return null; } @@ -139,7 +139,7 @@ public RestoreDfTask(Set dfInfos) { @Override public Void call() throws Exception { for (DataFileInfo dfInfo : dfInfos) { - dfInfo.restoreIfOffline(); + DataSelection.restoreIfOffline(dfInfo); } return null; } @@ -158,7 +158,7 @@ public RestoreDsTask(Collection 
dsInfos, Set emptyDs) { @Override public Void call() throws Exception { for (DataSetInfo dsInfo : dsInfos) { - dsInfo.restoreIfOffline(emptyDs); + DataSelection.restoreIfOffline(dsInfo, emptyDs); } return null; } @@ -1283,7 +1283,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check online status of {} entries", toCheck.size()); for (DataSetInfo dsInfo : toCheck) { fsm.checkFailure(dsInfo.getId()); - if (dsInfo.restoreIfOffline(preparedJson.emptyDatasets)) { + if (DataSelection.restoreIfOffline(dsInfo, preparedJson.emptyDatasets)) { prepared = false; status.fromDsElement = dsInfo.getId(); toCheck = preparedJson.dsInfos.tailMap(status.fromDsElement).values(); @@ -1298,7 +1298,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check finally online status of {} entries", toCheck.size()); for (DataSetInfo dsInfo : toCheck) { fsm.checkFailure(dsInfo.getId()); - if (dsInfo.restoreIfOffline(preparedJson.emptyDatasets)) { + if (DataSelection.restoreIfOffline(dsInfo, preparedJson.emptyDatasets)) { prepared = false; } } @@ -1309,7 +1309,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check online status of {} entries", toCheck.size()); for (DataFileInfo dfInfo : toCheck) { fsm.checkFailure(dfInfo.getId()); - if (dfInfo.restoreIfOffline()) { + if (DataSelection.restoreIfOffline(dfInfo)) { prepared = false; status.fromDfElement = dfInfo; toCheck = preparedJson.dfInfos.tailSet(status.fromDfElement); @@ -1324,7 +1324,7 @@ public Boolean isPrepared(String preparedId, String ip) logger.debug("Will check finally online status of {} entries", toCheck.size()); for (DataFileInfo dfInfo : toCheck) { fsm.checkFailure(dfInfo.getId()); - if (dfInfo.restoreIfOffline()) { + if (DataSelection.restoreIfOffline(dfInfo)) { prepared = false; } } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java 
b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java index d1997080..ff8ca52d 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -4,6 +4,7 @@ import java.util.Map; import java.util.Set; +import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.v3.models.DataFileInfo; @@ -22,7 +23,7 @@ public void checkOnline()throws InternalException, DataNotOnlineException { boolean maybeOffline = false; for (DataFileInfo dfInfo : dfInfos) { - if (dfInfo.restoreIfOffline()) { + if (this.restoreIfOffline(dfInfo)) { maybeOffline = true; } } @@ -31,5 +32,17 @@ public void checkOnline()throws InternalException, DataNotOnlineException { "Before getting a datafile, it must be restored, restoration requested automatically"); } } + + public boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDfMaybeOffline().contains(dfInfo)) { + maybeOffline = true; + } else if (!serviceProvider.getMainStorage().exists(dfInfo.getDfLocation())) { + serviceProvider.getFsm().queue(dfInfo, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java index 21fc7697..ce8aa7b1 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -4,6 +4,7 @@ import java.util.Map; import java.util.Set; +import org.icatproject.ids.DeferredOp; import 
org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.v3.models.DataFileInfo; @@ -22,7 +23,7 @@ public void checkOnline() throws InternalException, DataNotOnlineException { boolean maybeOffline = false; for (DataSetInfo dsInfo : dsInfos.values()) { - if (dsInfo.restoreIfOffline(emptyDatasets)) { + if (this.restoreIfOffline(dsInfo, emptyDatasets)) { maybeOffline = true; } } @@ -32,4 +33,17 @@ public void checkOnline() throws InternalException, DataNotOnlineException { } } + + public boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getDsMaybeOffline().contains(dsInfo)) { + maybeOffline = true; + } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists(dsInfo)) { + serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java b/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java index a5f47e88..f18ee443 100644 --- a/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataFileInfo.java @@ -1,8 +1,6 @@ package org.icatproject.ids.v3.models; -import org.icatproject.ids.DeferredOp; -import org.icatproject.ids.exceptions.InternalException; + import org.icatproject.ids.plugin.DfInfo; -import org.icatproject.ids.v3.ServiceProvider; /** * Contains information about a Datafile. 
Replaces DsInfo in v3 @@ -50,18 +48,6 @@ public int compareTo(DataFileInfo o) { return 0; } - public boolean restoreIfOffline() throws InternalException { - boolean maybeOffline = false; - var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDfMaybeOffline().contains(this)) { - maybeOffline = true; - } else if (!serviceProvider.getMainStorage().exists(this.getDfLocation())) { - serviceProvider.getFsm().queue(this, DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } - // implementing DfInfo @Override public Long getDfId() { return this.getId(); } diff --git a/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java index 84485957..a4ce0e6c 100644 --- a/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java @@ -63,18 +63,6 @@ public String toString() { + this.name + ")"; } - public boolean restoreIfOffline(Set emptyDatasets) throws InternalException { - boolean maybeOffline = false; - var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDsMaybeOffline().contains(this)) { - maybeOffline = true; - } else if (!emptyDatasets.contains(this.getId()) && !serviceProvider.getMainStorage().exists(this)) { - serviceProvider.getFsm().queue(this, DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } - public Long getFacilityId() { return facilityId; } From db8ee8da3ffba344b804a3664d0fa4074961cdfe Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Thu, 15 Feb 2024 14:55:25 +0100 Subject: [PATCH 10/92] some cleaning up --- .../org/icatproject/ids/DataSelection.java | 2 +- .../java/org/icatproject/ids/IdsBean.java | 114 ------------------ .../ids/v3/handlers/GetDataHandler.java | 1 - .../ids/v3/models/DataSetInfo.java | 5 - 4 files changed, 1 insertion(+), 121 deletions(-) diff --git 
a/src/main/java/org/icatproject/ids/DataSelection.java b/src/main/java/org/icatproject/ids/DataSelection.java index 34f19ac1..60421ab0 100644 --- a/src/main/java/org/icatproject/ids/DataSelection.java +++ b/src/main/java/org/icatproject/ids/DataSelection.java @@ -331,7 +331,7 @@ public void checkOnline() if (storageUnit == StorageUnit.DATASET) { boolean maybeOffline = false; for (DataSetInfo dsInfo : dsInfos.values()) { - if (this.restoreIfOffline(dsInfo, emptyDatasets)) { + if (DataSelection.restoreIfOffline(dsInfo, emptyDatasets)) { maybeOffline = true; } } diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 2458a85e..8d3a24ff 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -11,11 +11,9 @@ import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.security.NoSuchAlgorithmException; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.HashSet; @@ -33,13 +31,9 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; import java.util.regex.Pattern; import java.util.zip.CRC32; -import java.util.zip.ZipEntry; -import java.util.zip.ZipException; -import java.util.zip.ZipOutputStream; import javax.xml.datatype.DatatypeFactory; import jakarta.annotation.PostConstruct; @@ -52,7 +46,6 @@ import jakarta.json.JsonValue; import jakarta.json.stream.JsonGenerator; import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -76,7 +69,6 @@ import org.icatproject.ids.exceptions.NotImplementedException; import 
org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; import org.icatproject.ids.v3.enums.CallType; @@ -164,109 +156,6 @@ public Void call() throws Exception { } } - private class SO implements StreamingOutput { - - private long offset; - private boolean zip; - private Map dsInfos; - private Lock lock; - private boolean compress; - private Set dfInfos; - private String ip; - private long start; - private Long transferId; - - SO(Map dsInfos, Set dfInfos, long offset, boolean zip, boolean compress, - Lock lock, Long transferId, String ip, long start) { - this.offset = offset; - this.zip = zip; - this.dsInfos = dsInfos; - this.dfInfos = dfInfos; - this.lock = lock; - this.compress = compress; - this.transferId = transferId; - this.ip = ip; - this.start = start; - } - - @Override - public void write(OutputStream output) throws IOException { - Object transfer = "??"; - try { - if (offset != 0) { // Wrap the stream if needed - output = new RangeOutputStream(output, offset, null); - } - byte[] bytes = new byte[BUFSIZ]; - if (zip) { - ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(output)); - if (!compress) { - zos.setLevel(0); // Otherwise use default compression - } - - for (DataFileInfo dfInfo : dfInfos) { - logger.debug("Adding " + dfInfo + " to zip"); - transfer = dfInfo; - DsInfo dsInfo = dsInfos.get(dfInfo.getDsId()); - String entryName = zipMapper.getFullEntryName(dsInfo, dfInfo); - InputStream stream = null; - try { - zos.putNextEntry(new ZipEntry(entryName)); - stream = mainStorage.get(dfInfo.getDfLocation(), dfInfo.getCreateId(), dfInfo.getModId()); - int length; - while ((length = stream.read(bytes)) >= 0) { - zos.write(bytes, 0, length); - } - } catch (ZipException e) { - logger.debug("Skipped duplicate"); - } - zos.closeEntry(); - if 
(stream != null) { - stream.close(); - } - } - zos.close(); - } else { - DataFileInfo dfInfo = dfInfos.iterator().next(); - transfer = dfInfo; - InputStream stream = mainStorage.get(dfInfo.getDfLocation(), dfInfo.getCreateId(), - dfInfo.getModId()); - int length; - while ((length = stream.read(bytes)) >= 0) { - output.write(bytes, 0, length); - } - output.close(); - stream.close(); - } - - if (transferId != null) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("transferId", transferId); - gen.writeEnd(); - } - transmitter.processMessage("getData", ip, baos.toString(), start); - } - - } catch (IOException e) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("transferId", transferId); - gen.write("exceptionClass", e.getClass().toString()); - gen.write("exceptionMessage", e.getMessage()); - gen.writeEnd(); - } - transmitter.processMessage("getData", ip, baos.toString(), start); - logger.error("Failed to stream " + transfer + " due to " + e.getMessage()); - throw e; - } finally { - lock.release(); - } - } - - } - - private static final int BUFSIZ = 2048; - private static Boolean inited = false; private static String key; @@ -489,8 +378,6 @@ public static void validateUUID(String thing, String id) throws BadRequestExcept private StorageUnit storageUnit; - private ZipMapperInterface zipMapper; - private boolean twoLevel; private Set logSet; @@ -1179,7 +1066,6 @@ private void init() { synchronized (inited) { logger.info("creating IdsBean"); propertyHandler = PropertyHandler.getInstance(); - zipMapper = propertyHandler.getZipMapper(); mainStorage = propertyHandler.getMainStorage(); archiveStorage = propertyHandler.getArchiveStorage(); twoLevel = archiveStorage != null; diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java 
b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index 89033aa5..18c2fec1 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -7,7 +7,6 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Map; diff --git a/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java index a4ce0e6c..e89ddd57 100644 --- a/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java +++ b/src/main/java/org/icatproject/ids/v3/models/DataSetInfo.java @@ -1,15 +1,10 @@ package org.icatproject.ids.v3.models; -import java.util.Set; - import org.icatproject.Dataset; import org.icatproject.Facility; import org.icatproject.Investigation; -import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.InsufficientPrivilegesException; -import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.DsInfo; -import org.icatproject.ids.v3.ServiceProvider; /** * Contains information about a Dataset. Replaces DsInfo in v3. 
From edc8e826edd4e0bfef5797b949fd923058aafd64 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Fri, 16 Feb 2024 15:36:01 +0100 Subject: [PATCH 11/92] Improvement of DataSelectionV3 and handler for archive request --- .../java/org/icatproject/ids/IdsService.java | 17 +++- .../ids/v3/DataSelectionFactory.java | 86 +++++++++++++++---- .../DataSelectionForSingleLevelStorage.java | 18 +++- .../DataSelectionForStorageUnitDatafile.java | 14 ++- .../DataSelectionForStorageUnitDataset.java | 15 +++- .../ids/v3/DataSelectionV3Base.java | 29 ++++++- .../v3/{handlers => }/RequestHandlerBase.java | 31 +++++-- .../ids/v3/RequestHandlerServiceBase.java | 16 ++-- .../icatproject/ids/v3/ServiceProvider.java | 7 ++ .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../ids/v3/enums/ValueContainerType.java | 2 +- .../ids/v3/handlers/ArchiveHandler.java | 73 ++++++++++++++++ .../ids/v3/handlers/GetDataHandler.java | 18 ++-- .../ids/v3/models/ValueContainer.java | 8 ++ 14 files changed, 288 insertions(+), 48 deletions(-) rename src/main/java/org/icatproject/ids/v3/{handlers => }/RequestHandlerBase.java (83%) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 69f646bf..e051dc89 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -80,6 +80,7 @@ public class IdsService { * @throws InsufficientPrivilegesException * @throws InternalException * @throws NotFoundException + * @throws DataNotOnlineException * @summary archive * @statuscode 200 To indicate success */ @@ -90,8 +91,17 @@ public void archive(@Context HttpServletRequest request, @FormParam("sessionId") @FormParam("investigationIds") String investigationIds, @FormParam("datasetIds") String datasetIds, @FormParam("datafileIds") String datafileIds) throws NotImplementedException, BadRequestException, 
InsufficientPrivilegesException, InternalException, - NotFoundException { - idsBean.archive(sessionId, investigationIds, datasetIds, datafileIds, request.getRemoteAddr()); + NotFoundException, DataNotOnlineException { + + var parameters = new HashMap(); + parameters.put("request", new ValueContainer(request)); + parameters.put("sessionId", new ValueContainer(sessionId)); + parameters.put("investigationIds", new ValueContainer(investigationIds)); + parameters.put("datasetIds", new ValueContainer(datasetIds)); + parameters.put("datafileIds", new ValueContainer(datafileIds)); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + this.requestHandler.handle(RequestType.ARCHIVE, parameters); } /** @@ -172,6 +182,7 @@ public String getVersion() { * @throws InternalException * @throws InsufficientPrivilegesException * @throws DataNotOnlineException + * @throws NotImplementedException * @summary getData * @statuscode 200 To indicate success */ @@ -183,7 +194,7 @@ public Response getData(@Context HttpServletRequest request, @QueryParam("prepar @QueryParam("datasetIds") String datasetIds, @QueryParam("datafileIds") String datafileIds, @QueryParam("compress") boolean compress, @QueryParam("zip") boolean zip, @QueryParam("outname") String outname, @HeaderParam("Range") String range) throws BadRequestException, - NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { + NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException, NotImplementedException { var parameters = new HashMap(); diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java index fcec1012..2bd9205c 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -24,6 +24,8 @@ import org.icatproject.ids.exceptions.InsufficientPrivilegesException; import 
org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; import org.slf4j.Logger; @@ -33,8 +35,6 @@ import jakarta.json.JsonArray; import jakarta.json.JsonValue; - - public class DataSelectionFactory { private final static Logger logger = LoggerFactory.getLogger(DataSelectionFactory.class); @@ -46,6 +46,7 @@ public class DataSelectionFactory { private IcatReader icatReader; private org.icatproject.icat.client.ICAT restIcat; private int maxEntities; + private HashMap requestTypeToReturnsMapping; public enum Returns { DATASETS, DATASETS_AND_DATAFILES, DATAFILES @@ -58,21 +59,21 @@ public static DataSelectionFactory getInstance() throws InternalException { return instance; } - public static DataSelectionV3Base get(String userSessionId, - String investigationIds, String datasetIds, String datafileIds, Returns returns) throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException { + protected static DataSelectionV3Base get(String userSessionId, String investigationIds, String datasetIds, String datafileIds, RequestType requestType) + throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException, NotImplementedException { - return DataSelectionFactory.getInstance().getSelection(userSessionId, investigationIds, datasetIds, datafileIds, returns); + return DataSelectionFactory.getInstance().getSelection(userSessionId, investigationIds, datasetIds, datafileIds, requestType); } - public static DataSelectionV3Base get(Map dsInfos, Set dfInfos, Set emptyDatasets) throws InternalException { + protected static DataSelectionV3Base get(Map dsInfos, Set dfInfos, Set emptyDatasets, RequestType requestType) throws InternalException { List dsids = new 
ArrayList(dsInfos.keySet()); List dfids = new ArrayList(); for(DataFileInfo dfInfo: dfInfos) { dfids.add(dfInfo.getId()); } - return DataSelectionFactory.getInstance().createSelection(dsInfos, dfInfos, emptyDatasets, new ArrayList(), dsids, dfids); + return DataSelectionFactory.getInstance().createSelection(dsInfos, dfInfos, emptyDatasets, new ArrayList(), dsids, dfids, requestType); } private DataSelectionFactory() throws InternalException @@ -83,32 +84,37 @@ private DataSelectionFactory() throws InternalException this.icatReader = ServiceProvider.getInstance().getIcatReader(); this.restIcat = propertyHandler.getRestIcat(); this.maxEntities = propertyHandler.getMaxEntities(); + + this.createRequestTypeToReturnsMapping(); + logger.info("### Constructing finished"); } - private DataSelectionV3Base createSelection(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids) throws InternalException { + private DataSelectionV3Base createSelection(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) throws InternalException { StorageUnit storageUnit = this.propertyHandler.getStorageUnit(); if(storageUnit == null ) - return new DataSelectionForSingleLevelStorage(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + return new DataSelectionForSingleLevelStorage(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); else if (storageUnit == StorageUnit.DATAFILE) - return new DataSelectionForStorageUnitDatafile(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + return new DataSelectionForStorageUnitDatafile(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); else if(storageUnit == StorageUnit.DATASET) - return new DataSelectionForStorageUnitDataset(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + return new DataSelectionForStorageUnitDataset(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); else throw new 
InternalException("StorageUnit " + storageUnit + " unknown. Maybe you forgot to handle a new StorageUnit here?"); } - private DataSelectionV3Base getSelection( String userSessionId, - String investigationIds, String datasetIds, String datafileIds, Returns returns) throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException { + private DataSelectionV3Base getSelection( String userSessionId, String investigationIds, String datasetIds, String datafileIds, RequestType requestType) + throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException, NotImplementedException { List dfids = getValidIds("datafileIds", datafileIds); List dsids = getValidIds("datasetIds", datasetIds); List invids = getValidIds("investigationIds", investigationIds); + + Returns returns = this.getReturns(requestType); boolean dfWanted = returns == Returns.DATASETS_AND_DATAFILES || returns == Returns.DATAFILES; boolean dsWanted = returns == Returns.DATASETS_AND_DATAFILES || returns == Returns.DATASETS; @@ -129,12 +135,12 @@ private DataSelectionV3Base getSelection( String userSessionId, logger.debug("dfids: {} dsids: {} invids: {}", dfids, dsids, invids); - return prepareFromIds(dfWanted, dsWanted, dfids, dsids, invids, userSessionId, restSessionToUse, userRestSession); + return prepareFromIds(dfWanted, dsWanted, dfids, dsids, invids, userSessionId, restSessionToUse, userRestSession, requestType); } - private DataSelectionV3Base prepareFromIds(boolean dfWanted, boolean dsWanted, List dfids, List dsids, List invids, String userSessionId, Session restSessionToUse, Session userRestSession) + private DataSelectionV3Base prepareFromIds(boolean dfWanted, boolean dsWanted, List dfids, List dsids, List invids, String userSessionId, Session restSessionToUse, Session userRestSession, RequestType requestType) throws NotFoundException, InsufficientPrivilegesException, InternalException, BadRequestException { var dsInfos = new HashMap(); 
var emptyDatasets = new HashSet(); @@ -221,7 +227,7 @@ private DataSelectionV3Base prepareFromIds(boolean dfWanted, boolean dsWanted, L emptyDatasets = null; } - return this.createSelection(dsInfos, dfInfos, emptyDatasets, invids, dsids, dfids); + return this.createSelection(dsInfos, dfInfos, emptyDatasets, invids, dsids, dfids, requestType); } /** @@ -341,4 +347,52 @@ private void manyDss(HashMap dsInfos, HashSet emptyData } + private void createRequestTypeToReturnsMapping() throws InternalException { + + this.requestTypeToReturnsMapping = new HashMap(); + StorageUnit storageUnit = this.propertyHandler.getStorageUnit(); + + //commented out entries: uncomment when you create the new hendlers for the RequestTypes during the current redesign + + //this.requestTypeToReturnsMapping.put(RequestType.DELETE, Returns.DATASETS_AND_DATAFILES); + //this.requestTypeToReturnsMapping.put(RequestType.GETDATAFILEIDS, Returns.DATAFILES); + //this.requestTypeToReturnsMapping.put(RequestType.GETSIZE, Returns.DATASETS_AND_DATAFILES); + //this.requestTypeToReturnsMapping.put(RequestType.PREPAREDATA, Returns.DATASETS_AND_DATAFILES); + //this.requestTypeToReturnsMapping.put(RequestType.RESET, Returns.DATASETS_AND_DATAFILES); + //this.requestTypeToReturnsMapping.put(RequestType.WRITE, Returns.DATASETS_AND_DATAFILES); + this.requestTypeToReturnsMapping.put(RequestType.GETDATA, Returns.DATASETS_AND_DATAFILES); + + if(storageUnit == null ) { + //this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATASETS); + } + + + else if (storageUnit == StorageUnit.DATAFILE) { + //this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATAFILES); + //this.requestTypeToReturnsMapping.put(RequestType.RESTORE, Returns.DATAFILES); + this.requestTypeToReturnsMapping.put(RequestType.ARCHIVE, Returns.DATAFILES); + } + + + else if(storageUnit == StorageUnit.DATASET) { + //this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATASETS); + 
//this.requestTypeToReturnsMapping.put(RequestType.RESTORE, Returns.DATASETS); + this.requestTypeToReturnsMapping.put(RequestType.ARCHIVE, Returns.DATASETS); + } + + + else throw new InternalException("StorageUnit " + storageUnit + " unknown. Maybe you forgot to handle a new StorageUnit here?"); + } + + private Returns getReturns(RequestType requestType) throws NotImplementedException { + + if(this.requestTypeToReturnsMapping.containsKey(requestType)) + return this.requestTypeToReturnsMapping.get(requestType); + + + if(this.propertyHandler.getStorageUnit() == null) throw new NotImplementedException("This operation is unavailable for single level storage"); + + throw new NotImplementedException("There is to mapping for RequestType." + requestType + " and StorageUnit." + this.propertyHandler.getStorageUnit() + " defined. Did you forgot to register it in createRequestTypeToReturnsMapping()?"); + } + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java index c2bdd8b2..f66fad4d 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java @@ -4,22 +4,36 @@ import java.util.Map; import java.util.Set; +import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; public class DataSelectionForSingleLevelStorage extends DataSelectionV3Base { protected DataSelectionForSingleLevelStorage(Map dsInfos, Set dfInfos, - Set emptyDatasets, List invids2, List dsids, List dfids) { + Set emptyDatasets, List invids2, List dsids, 
List dfids, RequestType requestType) { - super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); } + @Override public void checkOnline() throws InternalException, DataNotOnlineException { // nothing to do here for single level storage } + + + @Override + protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { + + throw new NotImplementedException("This operation is unavailable for single level storage"); + } + + + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java index ff8ca52d..b00a4d56 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -7,15 +7,17 @@ import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; public class DataSelectionForStorageUnitDatafile extends DataSelectionV3Base { protected DataSelectionForStorageUnitDatafile(Map dsInfos, Set dfInfos, - Set emptyDatasets, List invids2, List dsids, List dfids) { + Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { - super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); } @Override @@ -44,5 +46,13 @@ public boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { } return maybeOffline; } + + @Override + protected void scheduleTask(DeferredOp operation) throws 
NotImplementedException, InternalException { + + for (DataFileInfo dfInfo : dfInfos) { + ServiceProvider.getInstance().getFsm().queue(dfInfo, operation); + } + } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java index ce8aa7b1..de6a9371 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -7,15 +7,17 @@ import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; public class DataSelectionForStorageUnitDataset extends DataSelectionV3Base { protected DataSelectionForStorageUnitDataset(Map dsInfos, Set dfInfos, - Set emptyDatasets, List invids2, List dsids, List dfids) { + Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { - super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids); + super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); } @Override @@ -46,4 +48,13 @@ public boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) thr return maybeOffline; } + @Override + protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { + + for (DataSetInfo dsInfo : dsInfos.values()) { + ServiceProvider.getInstance().getFsm().queue(dsInfo, operation); + } + } + + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java index a2b41b4d..4971144e 100644 --- 
a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -2,13 +2,17 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; @@ -20,8 +24,10 @@ public abstract class DataSelectionV3Base { protected List invids; protected List dsids; protected List dfids; + protected RequestType requestType; + protected HashMap requestTypeToDeferredOpMapping; - protected DataSelectionV3Base(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids) { + protected DataSelectionV3Base(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { this.dsInfos = dsInfos; this.dfInfos = dfInfos; @@ -29,9 +35,19 @@ protected DataSelectionV3Base(Map dsInfos, Set this.invids = invids2; this.dsids = dsids; this.dfids = dfids; + this.requestType = requestType; + + this.requestTypeToDeferredOpMapping = new HashMap(); + this.requestTypeToDeferredOpMapping.put(RequestType.ARCHIVE, DeferredOp.ARCHIVE); + this.requestTypeToDeferredOpMapping.put(RequestType.GETDATA, null); } + public abstract void checkOnline() throws InternalException, DataNotOnlineException; + + protected abstract void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException; + + public Map getDsInfo() { return dsInfos; } @@ -83,6 +99,15 @@ public static List getValidIds(String thing, String idList) throws BadRequ } - public abstract 
void checkOnline() throws InternalException, DataNotOnlineException; + public void scheduleTask() throws NotImplementedException, InternalException { + + + DeferredOp operation = this.requestTypeToDeferredOpMapping.get(this.requestType); + + if(operation == null) throw new InternalException("No DeferredOp defined for RequestType." + this.requestType); + // ... or did you forget to add an entry for your new RequestType in this.requestTypeToDeferredOpMapping (constructor)? + + this.scheduleTask(operation); + } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java similarity index 83% rename from src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java rename to src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index 8dcb88b1..75d11a3e 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -1,4 +1,4 @@ -package org.icatproject.ids.v3.handlers; +package org.icatproject.ids.v3; import java.io.InputStream; import java.nio.file.Path; @@ -6,6 +6,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; @@ -13,7 +14,7 @@ import java.util.TreeSet; import java.util.regex.Pattern; -import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; import org.icatproject.ids.v3.models.ValueContainer; @@ -35,6 +36,7 @@ import org.icatproject.ids.exceptions.InsufficientPrivilegesException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; public abstract class 
RequestHandlerBase { @@ -43,6 +45,7 @@ public abstract class RequestHandlerBase { protected Path preparedDir; protected boolean twoLevel; protected StorageUnit storageUnit; + protected RequestType requestType; /** * matches standard UUID format of 8-4-4-4-12 hexadecimal digits @@ -50,18 +53,34 @@ public abstract class RequestHandlerBase { public static final Pattern uuidRegExp = Pattern .compile("^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"); - protected RequestHandlerBase(StorageUnit[] supportedStorageUnitsArray ) { + protected RequestHandlerBase(StorageUnit[] supportedStorageUnitsArray, RequestType requestType ) { this.supportedStorageUnits = Arrays.asList(supportedStorageUnitsArray); + this.requestType = requestType; } - protected RequestHandlerBase(StorageUnit supportedStorageUnit) { - this(new StorageUnit[]{supportedStorageUnit}); + protected RequestHandlerBase(StorageUnit supportedStorageUnit, RequestType requestType) { + this(new StorageUnit[]{supportedStorageUnit}, requestType); } public boolean supportsStorageUnit(StorageUnit neededStorageUnit) { return this.supportedStorageUnits.contains(neededStorageUnit); } + public RequestType getRequestType() { + return this.requestType; + } + + public DataSelectionV3Base getDataSelection(Map dsInfos, Set dfInfos, Set emptyDatasets) throws InternalException { + + return DataSelectionFactory.get(dsInfos, dfInfos, emptyDatasets, this.getRequestType()); + } + + public DataSelectionV3Base getDataSelection(String userSessionId, String investigationIds, String datasetIds, String datafileIds) + throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException, NotImplementedException { + + return DataSelectionFactory.get(userSessionId, investigationIds, datasetIds, datafileIds, this.getRequestType()); + } + public void init() throws InternalException { //logger.info("Initialize RequestHandlerBase..."); @@ -78,7 +97,7 @@ public void init() throws InternalException { 
//logger.info("RequestHandlerBase initialized"); } - public abstract ValueContainer handle(HashMap parameters) throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException; + public abstract ValueContainer handle(HashMap parameters) throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException, NotImplementedException; protected static void validateUUID(String thing, String id) throws BadRequestException { diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java index 1e071dd2..8dfa8e29 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java @@ -14,14 +14,16 @@ import org.icatproject.ids.exceptions.InsufficientPrivilegesException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.handlers.ArchiveHandler; import org.icatproject.ids.v3.handlers.GetDataHandler; -import org.icatproject.ids.v3.handlers.RequestHandlerBase; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +//TODO: rename to RequestHandlerService public class RequestHandlerServiceBase { private HashMap handlers; @@ -43,23 +45,27 @@ public RequestHandlerServiceBase() { this.unfinishedWorkService = new UnfinishedWorkServiceBase(); this.handlers = new HashMap(); - this.registerHandler(RequestType.GETDATA, new GetDataHandler()); + this.registerHandler(new GetDataHandler()); + this.registerHandler(new ArchiveHandler()); } - private void registerHandler(RequestType requestType, 
RequestHandlerBase requestHandler) { + + private void registerHandler(RequestHandlerBase requestHandler) { //use only the handlers that supports the configured StorageUnit if( requestHandler.supportsStorageUnit(this.propertyHandler.getStorageUnit()) ) - this.handlers.put(requestType, requestHandler); + this.handlers.put(requestHandler.getRequestType(), requestHandler); } - public ValueContainer handle(RequestType requestType, HashMap parameters) throws InternalException, BadRequestException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException { + + public ValueContainer handle(RequestType requestType, HashMap parameters) throws InternalException, BadRequestException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException, NotImplementedException { if(this.handlers.containsKey(requestType)) return this.handlers.get(requestType).handle(parameters); else throw new InternalException("No handler found for RequestType " + requestType + " and StorageUnit " + this.propertyHandler.getStorageUnit() + " in RequestHandlerService. 
Do you forgot to register?"); } + public void init(Transmitter transmitter, LockManager lockManager , FiniteStateMachine fsm, IcatReader reader) { try { diff --git a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java index ecc56429..791bd889 100644 --- a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java +++ b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java @@ -1,5 +1,7 @@ package org.icatproject.ids.v3; +import java.util.Set; + import org.icatproject.ICAT; import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.IcatReader; @@ -8,6 +10,7 @@ import org.icatproject.ids.Transmitter; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.enums.CallType; public class ServiceProvider { @@ -67,5 +70,9 @@ public ICAT getIcat() { return this.getPropertyHandler().getIcatService(); } + public Set getLogSet() { + return PropertyHandler.getInstance().getLogSet(); + } + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 83baffb5..843a4b98 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA + GETDATA, ARCHIVE } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java index 5aa1a252..8e4cf806 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java @@ -4,5 +4,5 @@ * This enum provides all possible values of a ValueContainer */ public enum ValueContainerType { 
- INVALID, INT, BOOL, STRING, REQUEST, RESPONSE + INVALID, VOID, INT, BOOL, STRING, REQUEST, RESPONSE } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java new file mode 100644 index 00000000..9533082b --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java @@ -0,0 +1,73 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.util.HashMap; + +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +public class ArchiveHandler extends RequestHandlerBase { + + public ArchiveHandler() { + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.ARCHIVE); + } + + @Override + public ValueContainer handle(HashMap parameters) throws BadRequestException, + InternalException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException, NotImplementedException { + + long start = System.currentTimeMillis(); + + String sessionId = parameters.get("sessionId").getString(); + String investigationIds = parameters.get("investigationIds").getString(); + String datasetIds = 
parameters.get("datasetIds").getString(); + String datafileIds = parameters.get("datafileIds").getString(); + String ip = parameters.get("ip").getString(); + + // Log and validate + logger.info("New webservice request: archive " + "investigationIds='" + investigationIds + "' " + "datasetIds='" + + datasetIds + "' " + "datafileIds='" + datafileIds + "'"); + + //TODO: move that into base class + validateUUID("sessionId", sessionId); + + + // Do it + DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); + dataSelection.scheduleTask(); + + if (ServiceProvider.getInstance().getLogSet().contains(CallType.MIGRATE)) { + try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("userName", ServiceProvider.getInstance().getIcat().getUserName(sessionId)); + addIds(gen, investigationIds, datasetIds, datafileIds); + gen.writeEnd(); + } + String body = baos.toString(); + ServiceProvider.getInstance().getTransmitter().processMessage("archive", ip, body, start); + } catch (IcatException_Exception e) { + logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); + } + } + + return ValueContainer.getVoid(); + + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index 18c2fec1..3289954b 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -26,14 +26,16 @@ import org.icatproject.ids.exceptions.InsufficientPrivilegesException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.plugin.AlreadyLockedException; -import 
org.icatproject.ids.v3.DataSelectionFactory; import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.RequestHandlerBase; import org.icatproject.ids.v3.ServiceProvider; -import org.icatproject.ids.v3.DataSelectionFactory.Returns; import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.helper.SO; import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; import org.icatproject.ids.v3.models.ValueContainer; @@ -48,7 +50,7 @@ public class GetDataHandler extends RequestHandlerBase { public GetDataHandler() { - super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null} ); + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.GETDATA ); } @@ -61,7 +63,7 @@ public void init() throws InternalException { @Override - public ValueContainer handle(HashMap parameters) throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException { + public ValueContainer handle(HashMap parameters) throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException, NotImplementedException { Response response = null; long offset = 0; @@ -120,7 +122,7 @@ private Response getData(String preparedId, String outname, final long offset, S throw new InternalException(e.getClass() + " " + e.getMessage()); } - DataSelectionV3Base dataSelection = DataSelectionFactory.get((Map) prepared.dsInfos, (Set) prepared.dfInfos, (Set) prepared.emptyDatasets); + DataSelectionV3Base dataSelection = this.getDataSelection((Map) prepared.dsInfos, (Set) prepared.dfInfos, (Set) prepared.emptyDatasets); final boolean zip = prepared.zip; final boolean compress = prepared.compress; final Set dfInfos = prepared.dfInfos; @@ -192,7 +194,7 @@ private Response getData(String 
preparedId, String outname, final long offset, S private Response getData(String sessionId, String investigationIds, String datasetIds, String datafileIds, final boolean compress, boolean zip, String outname, final long offset, String ip) throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, - DataNotOnlineException { + DataNotOnlineException, NotImplementedException { long start = System.currentTimeMillis(); @@ -204,7 +206,7 @@ private Response getData(String sessionId, String investigationIds, String datas var serviceProvider = ServiceProvider.getInstance(); - final DataSelectionV3Base dataSelection = DataSelectionFactory.get(sessionId, investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); + final DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); // Do it Map dsInfos = dataSelection.getDsInfo(); @@ -279,7 +281,7 @@ private Response getData(String sessionId, String investigationIds, String datas } } - private void checkDatafilesPresent(Set dfInfos) + private void checkDatafilesPresent(Set dfInfos) throws NotFoundException, InternalException { var serviceProvider = ServiceProvider.getInstance(); diff --git a/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java b/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java index fa0254e1..8fc65d87 100644 --- a/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java +++ b/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java @@ -32,10 +32,18 @@ public static ValueContainer getInvalid() { return new ValueContainer(); } + public static ValueContainer getVoid() { + return new ValueContainer((Void) null); + } + private ValueContainer() { this(null, ValueContainerType.INVALID); } + private ValueContainer(Void value) { + this(null, ValueContainerType.VOID); + } + /** * Creates a ValueContainer of type int * @param value the value contained by the container From 
2e582883132d8eb8cabf15ae8513491eecdb7f5c Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Mon, 19 Feb 2024 14:57:37 +0100 Subject: [PATCH 12/92] Request handler for getIcatUrl --- .../java/org/icatproject/ids/IdsService.java | 26 +++++++++---- .../ids/v3/RequestHandlerBase.java | 5 +++ ...ceBase.java => RequestHandlerService.java} | 8 ++-- .../icatproject/ids/v3/ServiceProvider.java | 4 ++ .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../ids/v3/handlers/GetIcatUrlHandler.java | 38 +++++++++++++++++++ 6 files changed, 71 insertions(+), 12 deletions(-) rename src/main/java/org/icatproject/ids/v3/{RequestHandlerServiceBase.java => RequestHandlerService.java} (95%) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/GetIcatUrlHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index e051dc89..62a47b49 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -38,7 +38,7 @@ import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; -import org.icatproject.ids.v3.RequestHandlerServiceBase; +import org.icatproject.ids.v3.RequestHandlerService; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.ValueContainer; @@ -63,7 +63,7 @@ public class IdsService { @EJB private IcatReader reader; - private RequestHandlerServiceBase requestHandler = null; + private RequestHandlerService requestService = null; /** * Archive data specified by the investigationIds, datasetIds and @@ -101,7 +101,7 @@ public void archive(@Context HttpServletRequest request, @FormParam("sessionId") parameters.put("datafileIds", new ValueContainer(datafileIds)); parameters.put("ip", new ValueContainer(request.getRemoteAddr())); - this.requestHandler.handle(RequestType.ARCHIVE, parameters); + 
this.requestService.handle(RequestType.ARCHIVE, parameters); } /** @@ -209,7 +209,7 @@ public Response getData(@Context HttpServletRequest request, @QueryParam("prepar parameters.put( "outname", new ValueContainer(outname) ); parameters.put( "range", new ValueContainer(range) ); - return this.requestHandler.handle(RequestType.GETDATA, parameters).getResponse(); + return this.requestService.handle(RequestType.GETDATA, parameters).getResponse(); } /** @@ -252,13 +252,23 @@ public String getDatafileIds(@Context HttpServletRequest request, @QueryParam("p * obtained. * * @return the url of the icat server + * @throws NotImplementedException + * @throws DataNotOnlineException + * @throws NotFoundException + * @throws InsufficientPrivilegesException + * @throws BadRequestException + * @throws InternalException * @statuscode 200 To indicate success */ @GET @Path("getIcatUrl") @Produces(MediaType.TEXT_PLAIN) - public String getIcatUrl(@Context HttpServletRequest request) { - return idsBean.getIcatUrl(request.getRemoteAddr()); + public String getIcatUrl(@Context HttpServletRequest request) throws InternalException, BadRequestException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException, NotImplementedException { + + var parameters = new HashMap(); + parameters.put("ip",new ValueContainer(request.getRemoteAddr()) ); + + return this.requestService.handle(RequestType.GETICATURL, parameters).getString(); } /** @@ -354,8 +364,8 @@ public String getStatus(@Context HttpServletRequest request, @QueryParam("prepar @PostConstruct private void init() { logger.info("creating IdsService"); - this.requestHandler = new RequestHandlerServiceBase(); - this.requestHandler.init(this.transmitter, this.lockManager, this.fsm, this.reader); + this.requestService = new RequestHandlerService(); + this.requestService.init(this.transmitter, this.lockManager, this.fsm, this.reader); logger.info("created IdsService"); } diff --git 
a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index 75d11a3e..563d0da8 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -81,6 +81,11 @@ public DataSelectionV3Base getDataSelection(String userSessionId, String investi return DataSelectionFactory.get(userSessionId, investigationIds, datasetIds, datafileIds, this.getRequestType()); } + /** + * This method initializes the base class part of the RequestHandler. + * You can overload it, but please don't overwrite it, because this base class part has also to be initialized + * @throws InternalException + */ public void init() throws InternalException { //logger.info("Initialize RequestHandlerBase..."); diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java similarity index 95% rename from src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java rename to src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 8dfa8e29..b78a538a 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerServiceBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -19,12 +19,13 @@ import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.handlers.ArchiveHandler; import org.icatproject.ids.v3.handlers.GetDataHandler; +import org.icatproject.ids.v3.handlers.GetIcatUrlHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; //TODO: rename to RequestHandlerService -public class RequestHandlerServiceBase { +public class RequestHandlerService { private HashMap handlers; private PropertyHandler propertyHandler; @@ -39,14 +40,15 @@ public class RequestHandlerServiceBase { private UnfinishedWorkServiceBase unfinishedWorkService; - public 
RequestHandlerServiceBase() { + public RequestHandlerService() { this.propertyHandler = PropertyHandler.getInstance(); this.unfinishedWorkService = new UnfinishedWorkServiceBase(); this.handlers = new HashMap(); this.registerHandler(new GetDataHandler()); - this.registerHandler(new ArchiveHandler()); + this.registerHandler(new ArchiveHandler()); + this.registerHandler(new GetIcatUrlHandler()); } diff --git a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java index 791bd889..51d47640 100644 --- a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java +++ b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java @@ -12,6 +12,10 @@ import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.enums.CallType; +/** + * This class serves the developer with multiple services. + * Maybe it is just for the redesign for version 3 and will later be replaced with dependency injection, when it will be more clear where which service is used. 
+ */ public class ServiceProvider { private static ServiceProvider instance = null; diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 843a4b98..3862ee61 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE + GETDATA, ARCHIVE, GETICATURL } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetIcatUrlHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetIcatUrlHandler.java new file mode 100644 index 00000000..7e892561 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetIcatUrlHandler.java @@ -0,0 +1,38 @@ +package org.icatproject.ids.v3.handlers; + +import java.util.HashMap; + +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.ValueContainer; + +public class GetIcatUrlHandler extends RequestHandlerBase { + + public GetIcatUrlHandler() { + super( new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.GETICATURL); + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + 
DataNotOnlineException, NotImplementedException { + + + var propertyHandler = ServiceProvider.getInstance().getPropertyHandler(); + + if (propertyHandler.getLogSet().contains(CallType.INFO)) { + ServiceProvider.getInstance().getTransmitter().processMessage("getIcatUrl", parameters.get("ip").getString(), "{}", System.currentTimeMillis()); + } + return new ValueContainer(propertyHandler.getIcatUrl()); + } + +} \ No newline at end of file From 70c04082d41c0c6cd98c87dd801d3b78c1221d95 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 20 Feb 2024 10:17:27 +0100 Subject: [PATCH 13/92] GetDataFileIdsHandler added --- .../java/org/icatproject/ids/IdsService.java | 27 ++- .../ids/v3/DataSelectionFactory.java | 2 +- .../ids/v3/RequestHandlerService.java | 4 +- .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../ids/v3/handlers/ArchiveHandler.java | 2 - .../v3/handlers/GetDataFileIdsHandler.java | 154 ++++++++++++++++++ 6 files changed, 179 insertions(+), 12 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 62a47b49..a29a6d1b 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -228,6 +228,8 @@ public Response getData(@Context HttpServletRequest request, @QueryParam("prepar * @throws InternalException * @throws NotFoundException * @throws InsufficientPrivilegesException + * @throws NotImplementedException + * @throws DataNotOnlineException * @summary getDatafileIds * @statuscode 200 To indicate success */ @@ -237,13 +239,24 @@ public Response getData(@Context HttpServletRequest request, @QueryParam("prepar public String getDatafileIds(@Context HttpServletRequest request, @QueryParam("preparedId") String preparedId, @QueryParam("sessionId") String sessionId, @QueryParam("investigationIds") String investigationIds, 
@QueryParam("datasetIds") String datasetIds, @QueryParam("datafileIds") String datafileIds) - throws BadRequestException, InternalException, NotFoundException, InsufficientPrivilegesException { - if (preparedId != null) { - return idsBean.getDatafileIds(preparedId, request.getRemoteAddr()); - } else { - return idsBean.getDatafileIds(sessionId, investigationIds, datasetIds, datafileIds, - request.getRemoteAddr()); - } + throws BadRequestException, InternalException, NotFoundException, InsufficientPrivilegesException, DataNotOnlineException, NotImplementedException { + + var parameters = new HashMap(); + parameters.put("preparedId", new ValueContainer(preparedId)); + parameters.put("sessionId", new ValueContainer(sessionId)); + parameters.put("investigationIds", new ValueContainer(investigationIds)); + parameters.put("datasetIds", new ValueContainer(datasetIds)); + parameters.put("datafileIds", new ValueContainer(datafileIds)); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.GETDATAFILEIDS, parameters).getString(); + + // if (preparedId != null) { + // return idsBean.getDatafileIds(preparedId, request.getRemoteAddr()); + // } else { + // return idsBean.getDatafileIds(sessionId, investigationIds, datasetIds, datafileIds, + // request.getRemoteAddr()); + // } } /** diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java index 2bd9205c..85290041 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -355,7 +355,7 @@ private void createRequestTypeToReturnsMapping() throws InternalException { //commented out entries: uncomment when you create the new hendlers for the RequestTypes during the current redesign //this.requestTypeToReturnsMapping.put(RequestType.DELETE, Returns.DATASETS_AND_DATAFILES); - 
//this.requestTypeToReturnsMapping.put(RequestType.GETDATAFILEIDS, Returns.DATAFILES); + this.requestTypeToReturnsMapping.put(RequestType.GETDATAFILEIDS, Returns.DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.GETSIZE, Returns.DATASETS_AND_DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.PREPAREDATA, Returns.DATASETS_AND_DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.RESET, Returns.DATASETS_AND_DATAFILES); diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index b78a538a..55284606 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -18,6 +18,7 @@ import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.handlers.ArchiveHandler; +import org.icatproject.ids.v3.handlers.GetDataFileIdsHandler; import org.icatproject.ids.v3.handlers.GetDataHandler; import org.icatproject.ids.v3.handlers.GetIcatUrlHandler; import org.icatproject.ids.v3.models.ValueContainer; @@ -48,7 +49,8 @@ public RequestHandlerService() { this.handlers = new HashMap(); this.registerHandler(new GetDataHandler()); this.registerHandler(new ArchiveHandler()); - this.registerHandler(new GetIcatUrlHandler()); + this.registerHandler(new GetIcatUrlHandler()); + this.registerHandler(new GetDataFileIdsHandler()); } diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 3862ee61..5475ca98 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL + GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS } \ No 
newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java index 9533082b..d3bdd2ac 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/ArchiveHandler.java @@ -43,9 +43,7 @@ public ValueContainer handle(HashMap parameters) throws logger.info("New webservice request: archive " + "investigationIds='" + investigationIds + "' " + "datasetIds='" + datasetIds + "' " + "datafileIds='" + datafileIds + "'"); - //TODO: move that into base class validateUUID("sessionId", sessionId); - // Do it DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java new file mode 100644 index 00000000..5274e081 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java @@ -0,0 +1,154 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.util.HashMap; +import java.util.Set; + +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.Prepared; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.RequestHandlerBase; +import 
org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +public class GetDataFileIdsHandler extends RequestHandlerBase { + + public GetDataFileIdsHandler() { + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.GETDATAFILEIDS); + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, NotImplementedException { + + String preparedId = parameters.get("preparedId").getString(); + String sessionId = parameters.get("sessionId").getString(); + String investigationIds = parameters.get("investigationIds").getString(); + String datasetIds = parameters.get("datasetIds").getString(); + String datafileIds = parameters.get("datafileIds").getString(); + String ip = parameters.get("ip").getString(); + + if (preparedId != null) { + return new ValueContainer(this.getDatafileIds(preparedId, ip)); + } else { + return new ValueContainer(this.getDatafileIds(sessionId, investigationIds, datasetIds, datafileIds, ip)); + } + } + + + private String getDatafileIds(String preparedId, String ip) + throws BadRequestException, InternalException, NotFoundException { + + long start = System.currentTimeMillis(); + + // Log and validate + logger.info("New webservice request: getDatafileIds preparedId = '" + preparedId); + + validateUUID("preparedId", preparedId); + + // Do it + Prepared prepared; + try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { + prepared = unpack(stream); + } catch (NoSuchFileException e) { + throw new NotFoundException("The preparedId " + preparedId + " is not known"); + } catch (IOException e) { + throw new 
InternalException(e.getClass() + " " + e.getMessage()); + } + + final boolean zip = prepared.zip; + final boolean compress = prepared.compress; + final Set dfInfos = prepared.dfInfos; + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("zip", zip); + gen.write("compress", compress); + gen.writeStartArray("ids"); + for (DataFileInfo dfInfo : dfInfos) { + gen.write(dfInfo.getId()); + } + gen.writeEnd().writeEnd().close(); + } + String resp = baos.toString(); + + var serviceProvider = ServiceProvider.getInstance(); + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("preparedId", preparedId); + gen.writeEnd(); + } + serviceProvider.getTransmitter().processMessage("getDatafileIds", ip, baos.toString(), start); + } + + return resp; + } + + private String getDatafileIds(String sessionId, String investigationIds, String datasetIds, String datafileIds, + String ip) + throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException, NotImplementedException { + + long start = System.currentTimeMillis(); + + // Log and validate + logger.info(String.format( + "New webservice request: getDatafileIds investigationIds=%s, datasetIds=%s, datafileIds=%s", + investigationIds, datasetIds, datafileIds)); + + validateUUID("sessionId", sessionId); + + final DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); + + // Do it + Set dfInfos = dataSelection.getDfInfo(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.writeStartArray("ids"); + for (DataFileInfo dfInfo : dfInfos) { + gen.write(dfInfo.getId()); + } + gen.writeEnd().writeEnd().close(); + } + String resp = 
baos.toString(); + + var serviceProvider = ServiceProvider.getInstance(); + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + baos = new ByteArrayOutputStream(); + try { + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("userName", serviceProvider.getIcat().getUserName(sessionId)); + addIds(gen, investigationIds, datasetIds, datafileIds); + gen.writeEnd(); + } + serviceProvider.getTransmitter().processMessage("getDatafileIds", ip, baos.toString(), start); + } catch (IcatException_Exception e) { + logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); + } + } + + return resp; + + } + +} \ No newline at end of file From b4da92eb045df91dfc3c716456f1f956c674c4bb Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 20 Feb 2024 12:27:00 +0100 Subject: [PATCH 14/92] added GetServiceStatusHandler --- .../java/org/icatproject/ids/IdsService.java | 20 +++-- .../ids/v3/DataSelectionFactory.java | 3 +- .../ids/v3/RequestHandlerService.java | 4 +- .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../v3/handlers/GetServiceStatusHandler.java | 83 +++++++++++++++++++ 5 files changed, 100 insertions(+), 12 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/GetServiceStatusHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index a29a6d1b..d64322ef 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -250,13 +250,6 @@ public String getDatafileIds(@Context HttpServletRequest request, @QueryParam("p parameters.put("ip", new ValueContainer(request.getRemoteAddr())); return this.requestService.handle(RequestType.GETDATAFILEIDS, parameters).getString(); - - // if (preparedId != null) { - // return idsBean.getDatafileIds(preparedId, request.getRemoteAddr()); - // } else { - // return idsBean.getDatafileIds(sessionId, 
investigationIds, datasetIds, datafileIds, - // request.getRemoteAddr()); - // } } /** @@ -293,6 +286,10 @@ public String getIcatUrl(@Context HttpServletRequest request) throws InternalExc * @return a json string. * @throws InternalException * @throws InsufficientPrivilegesException + * @throws NotImplementedException + * @throws DataNotOnlineException + * @throws NotFoundException + * @throws BadRequestException * @summary getServiceStatus * @statuscode 200 To indicate success */ @@ -300,8 +297,13 @@ public String getIcatUrl(@Context HttpServletRequest request) throws InternalExc @Path("getServiceStatus") @Produces(MediaType.APPLICATION_JSON) public String getServiceStatus(@Context HttpServletRequest request, @QueryParam("sessionId") String sessionId) - throws InternalException, InsufficientPrivilegesException { - return idsBean.getServiceStatus(sessionId, request.getRemoteAddr()); + throws InternalException, InsufficientPrivilegesException, BadRequestException, NotFoundException, DataNotOnlineException, NotImplementedException { + + var parameters = new HashMap(); + parameters.put("sessionId", new ValueContainer(sessionId)); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.GETSERVICESTATUS, parameters).getString(); } /** diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java index 85290041..cd22473c 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -390,7 +390,8 @@ private Returns getReturns(RequestType requestType) throws NotImplementedExcepti return this.requestTypeToReturnsMapping.get(requestType); - if(this.propertyHandler.getStorageUnit() == null) throw new NotImplementedException("This operation is unavailable for single level storage"); + // is this needed here? 
+ //if(this.propertyHandler.getStorageUnit() == null) throw new NotImplementedException("This operation is unavailable for single level storage"); throw new NotImplementedException("There is to mapping for RequestType." + requestType + " and StorageUnit." + this.propertyHandler.getStorageUnit() + " defined. Did you forgot to register it in createRequestTypeToReturnsMapping()?"); } diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 55284606..0339a977 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -21,6 +21,7 @@ import org.icatproject.ids.v3.handlers.GetDataFileIdsHandler; import org.icatproject.ids.v3.handlers.GetDataHandler; import org.icatproject.ids.v3.handlers.GetIcatUrlHandler; +import org.icatproject.ids.v3.handlers.GetServiceStatusHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,7 +51,8 @@ public RequestHandlerService() { this.registerHandler(new GetDataHandler()); this.registerHandler(new ArchiveHandler()); this.registerHandler(new GetIcatUrlHandler()); - this.registerHandler(new GetDataFileIdsHandler()); + this.registerHandler(new GetDataFileIdsHandler()); + this.registerHandler(new GetServiceStatusHandler()); } diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 5475ca98..576ad443 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS + GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS } \ No newline at end of file diff --git 
a/src/main/java/org/icatproject/ids/v3/handlers/GetServiceStatusHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetServiceStatusHandler.java new file mode 100644 index 00000000..4cb9676e --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetServiceStatusHandler.java @@ -0,0 +1,83 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.util.HashMap; +import java.util.Set; + +import org.icatproject.IcatExceptionType; +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +public class GetServiceStatusHandler extends RequestHandlerBase { + + private Set rootUserNames; + + public GetServiceStatusHandler() { + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.GETSERVICESTATUS); + } + + public void init() throws InternalException { + logger.info("Initializing GetServiceStatusHandler..."); + super.init(); + rootUserNames = ServiceProvider.getInstance().getPropertyHandler().getRootUserNames(); + logger.info("GetDataHandler GetServiceStatusHandler"); + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, 
NotImplementedException { + + long start = System.currentTimeMillis(); + + String sessionId = parameters.get("sessionId").getString(); + var serviceProvider = ServiceProvider.getInstance(); + + // Log and validate + logger.info("New webservice request: getServiceStatus"); + + try { + String uname = serviceProvider.getIcat().getUserName(sessionId); + if (!rootUserNames.contains(uname)) { + throw new InsufficientPrivilegesException(uname + " is not included in the ids rootUserNames set."); + } + } catch (IcatException_Exception e) { + IcatExceptionType type = e.getFaultInfo().getType(); + if (type == IcatExceptionType.SESSION) { + throw new InsufficientPrivilegesException(e.getClass() + " " + e.getMessage()); + } + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("userName", serviceProvider.getIcat().getUserName(sessionId)); + gen.writeEnd(); + } + String body = baos.toString(); + serviceProvider.getTransmitter().processMessage("getServiceStatus", parameters.get("ip").getString(), body, start); + } catch (IcatException_Exception e) { + logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); + } + } + + return new ValueContainer(serviceProvider.getFsm().getServiceStatus()); + } + +} \ No newline at end of file From fbe3d461731d176825ef59054e53f3832d1f42f7 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 20 Feb 2024 13:32:11 +0100 Subject: [PATCH 15/92] RequestHandler for GetSize --- .../java/org/icatproject/ids/IdsService.java | 20 +- .../ids/v3/DataSelectionFactory.java | 2 +- .../ids/v3/RequestHandlerService.java | 4 +- .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../ids/v3/enums/ValueContainerType.java | 2 +- .../ids/v3/handlers/GetSizeHandler.java | 254 ++++++++++++++++++ 
.../ids/v3/models/ValueContainer.java | 18 ++ 7 files changed, 292 insertions(+), 10 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index d64322ef..91670706 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -321,6 +321,8 @@ public String getServiceStatus(@Context HttpServletRequest request, @QueryParam( * @throws NotFoundException * @throws InsufficientPrivilegesException * @throws InternalException + * @throws NotImplementedException + * @throws DataNotOnlineException * @summary getSize * @statuscode 200 To indicate success */ @@ -330,12 +332,18 @@ public String getServiceStatus(@Context HttpServletRequest request, @QueryParam( public long getSize(@Context HttpServletRequest request, @QueryParam("preparedId") String preparedId, @QueryParam("sessionId") String sessionId, @QueryParam("investigationIds") String investigationIds, @QueryParam("datasetIds") String datasetIds, @QueryParam("datafileIds") String datafileIds) - throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { - if (preparedId != null) { - return idsBean.getSize(preparedId, request.getRemoteAddr()); - } else { - return idsBean.getSize(sessionId, investigationIds, datasetIds, datafileIds, request.getRemoteAddr()); - } + throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException, DataNotOnlineException, NotImplementedException { + + + var parameters = new HashMap(); + parameters.put("preparedId", new ValueContainer(preparedId)); + parameters.put("sessionId", new ValueContainer(sessionId)); + parameters.put("investigationIds", new ValueContainer(investigationIds)); + parameters.put("datasetIds", new ValueContainer(datasetIds)); + parameters.put("datafileIds", new 
ValueContainer(datafileIds)); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.GETSIZE, parameters).getLong(); } /** diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java index cd22473c..d9ad24eb 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -356,7 +356,7 @@ private void createRequestTypeToReturnsMapping() throws InternalException { //this.requestTypeToReturnsMapping.put(RequestType.DELETE, Returns.DATASETS_AND_DATAFILES); this.requestTypeToReturnsMapping.put(RequestType.GETDATAFILEIDS, Returns.DATAFILES); - //this.requestTypeToReturnsMapping.put(RequestType.GETSIZE, Returns.DATASETS_AND_DATAFILES); + this.requestTypeToReturnsMapping.put(RequestType.GETSIZE, Returns.DATASETS_AND_DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.PREPAREDATA, Returns.DATASETS_AND_DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.RESET, Returns.DATASETS_AND_DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.WRITE, Returns.DATASETS_AND_DATAFILES); diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 0339a977..1ec7f472 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -22,6 +22,7 @@ import org.icatproject.ids.v3.handlers.GetDataHandler; import org.icatproject.ids.v3.handlers.GetIcatUrlHandler; import org.icatproject.ids.v3.handlers.GetServiceStatusHandler; +import org.icatproject.ids.v3.handlers.GetSizeHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -52,7 +53,8 @@ public RequestHandlerService() { this.registerHandler(new 
ArchiveHandler()); this.registerHandler(new GetIcatUrlHandler()); this.registerHandler(new GetDataFileIdsHandler()); - this.registerHandler(new GetServiceStatusHandler()); + this.registerHandler(new GetServiceStatusHandler()); + this.registerHandler(new GetSizeHandler()); } diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 576ad443..cb5fbbcd 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS + GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java index 8e4cf806..ad018925 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java @@ -4,5 +4,5 @@ * This enum provides all possible values of a ValueContainer */ public enum ValueContainerType { - INVALID, VOID, INT, BOOL, STRING, REQUEST, RESPONSE + INVALID, VOID, INT, LONG, BOOL, STRING, REQUEST, RESPONSE } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java new file mode 100644 index 00000000..9150d9ae --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java @@ -0,0 +1,254 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.util.HashMap; +import java.util.List; 
+import java.util.Set; + +import org.icatproject.Datafile; +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.Prepared; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +public class GetSizeHandler extends RequestHandlerBase { + + public GetSizeHandler() { + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.GETSIZE); + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, NotImplementedException { + + String preparedId = parameters.get("preparedId").getString(); + String sessionId = parameters.get("sessionId").getString(); + String investigationIds = parameters.get("investigationIds").getString(); + String datasetIds = parameters.get("datasetIds").getString(); + String datafileIds = parameters.get("datafileIds").getString(); + String ip = parameters.get("ip").getString(); + + if (preparedId != null) { + return new ValueContainer(this.getSize(preparedId, ip)); + } else { + return new ValueContainer(this.getSize(sessionId, investigationIds, datasetIds, datafileIds, ip)); + } + } + + + 
public long getSize(String preparedId, String ip) + throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { + + long start = System.currentTimeMillis(); + + var serviceProvider = ServiceProvider.getInstance(); + + // Log and validate + logger.info("New webservice request: getSize preparedId = '{}'", preparedId); + validateUUID("preparedId", preparedId); + + // Do it + Prepared prepared; + try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { + prepared = unpack(stream); + } catch (NoSuchFileException e) { + throw new NotFoundException("The preparedId " + preparedId + " is not known"); + } catch (IOException e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + final Set dfInfos = prepared.dfInfos; + + // Note that the "fast computation for the simple case" (see the other getSize() implementation) is not + // available when calling getSize() with a preparedId. + logger.debug("Slow computation for normal case"); + String sessionId; + try { + sessionId = serviceProvider.getIcatReader().getSessionId(); + } catch (IcatException_Exception e) { + throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); + } + long size = 0; + + StringBuilder sb = new StringBuilder(); + int n = 0; + for (DataFileInfo df : dfInfos) { + if (sb.length() != 0) { + sb.append(','); + } + sb.append(df.getId()); + if (n++ == 500) { + size += getSizeFor(sessionId, sb); + sb = new StringBuilder(); + n = 0; + } + } + if (n > 0) { + size += getSizeFor(sessionId, sb); + } + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("preparedId", preparedId); + gen.writeEnd(); + } + String body = baos.toString(); + serviceProvider.getTransmitter().processMessage("getSize", ip, body, start); + } + + return size; + } + + + public long 
getSize(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) + throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException, NotImplementedException { + + long start = System.currentTimeMillis(); + var serviceProvider = ServiceProvider.getInstance(); + + // Log and validate + logger.info(String.format("New webservice request: getSize investigationIds=%s, datasetIds=%s, datafileIds=%s", + investigationIds, datasetIds, datafileIds)); + + validateUUID("sessionId", sessionId); + + List dfids = DataSelectionV3Base.getValidIds("datafileIds", datafileIds); + List dsids = DataSelectionV3Base.getValidIds("datasetIds", datasetIds); + List invids = DataSelectionV3Base.getValidIds("investigationIds", investigationIds); + + long size = 0; + if (dfids.size() + dsids.size() + invids.size() == 1) { + size = getSizeFor(sessionId, invids, "df.dataset.investigation.id") + + getSizeFor(sessionId, dsids, "df.dataset.id") + getSizeFor(sessionId, dfids, "df.id"); + logger.debug("Fast computation for simple case"); + if (size == 0) { + try { + if (dfids.size() != 0) { + Datafile datafile = (Datafile) serviceProvider.getIcat().get(sessionId, "Datafile", dfids.get(0)); + if (datafile.getLocation() == null) { + throw new NotFoundException("Datafile not found"); + } + } + if (dsids.size() != 0) { + serviceProvider.getIcat().get(sessionId, "Dataset", dsids.get(0)); + } + if (invids.size() != 0) { + serviceProvider.getIcat().get(sessionId, "Investigation", invids.get(0)); + } + } catch (IcatException_Exception e) { + throw new NotFoundException(e.getMessage()); + } + } + } else { + logger.debug("Slow computation for normal case"); + final DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); + + StringBuilder sb = new StringBuilder(); + int n = 0; + for (DataFileInfo df : dataSelection.getDfInfo()) { + if (sb.length() != 0) { + sb.append(','); + } + 
sb.append(df.getId()); + if (n++ == 500) { + size += getSizeFor(sessionId, sb); + sb = new StringBuilder(); + n = 0; + } + } + if (n > 0) { + size += getSizeFor(sessionId, sb); + } + } + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("userName", serviceProvider.getIcat().getUserName(sessionId)); + addIds(gen, investigationIds, datasetIds, datafileIds); + gen.writeEnd(); + } + String body = baos.toString(); + serviceProvider.getTransmitter().processMessage("getSize", ip, body, start); + } catch (IcatException_Exception e) { + logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); + } + } + + return size; + } + + + private long getSizeFor(String sessionId, StringBuilder sb) throws InternalException { + String query = "SELECT SUM(df.fileSize) from Datafile df WHERE df.id IN (" + sb.toString() + ") AND df.location IS NOT NULL"; + try { + return (Long) ServiceProvider.getInstance().getIcat().search(sessionId, query).get(0); + } catch (IcatException_Exception e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } catch (IndexOutOfBoundsException e) { + return 0L; + } + } + + + private long getSizeFor(String sessionId, List ids, String where) throws InternalException { + + long size = 0; + if (ids != null) { + + StringBuilder sb = new StringBuilder(); + int n = 0; + for (Long id : ids) { + if (sb.length() != 0) { + sb.append(','); + } + sb.append(id); + if (n++ == 500) { + size += evalSizeFor(sessionId, where, sb); + sb = new StringBuilder(); + n = 0; + } + } + if (n > 0) { + size += evalSizeFor(sessionId, where, sb); + } + } + return size; + } + + + private long evalSizeFor(String sessionId, String where, StringBuilder sb) throws InternalException { + String query = "SELECT SUM(df.fileSize) from Datafile df WHERE " + where + " IN (" + sb.toString() + 
") AND df.location IS NOT NULL"; + logger.debug("icat query for size: {}", query); + try { + return (Long) ServiceProvider.getInstance().getIcat().search(sessionId, query).get(0); + } catch (IcatException_Exception e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } catch (IndexOutOfBoundsException e) { + return 0L; + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java b/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java index 8fc65d87..b6363b5b 100644 --- a/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java +++ b/src/main/java/org/icatproject/ids/v3/models/ValueContainer.java @@ -52,6 +52,14 @@ public ValueContainer(int value) { this(value, ValueContainerType.INT); } + /** + * Creates a ValueContainer of type long + * @param value the value contained by the container + */ + public ValueContainer(long value) { + this(value, ValueContainerType.LONG); + } + /** * Creates a ValueContainer of type String * @param value the value contained by the container @@ -102,6 +110,16 @@ public int getInt() throws InternalException { return (int) this.value; } + /** + * Tries to return the value of the type long. + * @return + * @throws InternalException if the container has another type an exception will be thrown + */ + public long getLong() throws InternalException { + this.checkType(ValueContainerType.LONG); + return (long) this.value; + } + /** * Tries to return the value of the type boolean. 
* @return From 15501df9def4ccb68036f21fefdfa343a7408390 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Mon, 26 Feb 2024 11:25:56 +0100 Subject: [PATCH 16/92] un-EJB-ing the FiniteStateMachine --- .../icatproject/ids/FiniteStateMachine.java | 43 +++++++++++++------ .../java/org/icatproject/ids/IdsBean.java | 5 ++- .../java/org/icatproject/ids/IdsService.java | 12 ++++-- src/main/java/org/icatproject/ids/Tidier.java | 4 +- 4 files changed, 44 insertions(+), 20 deletions(-) diff --git a/src/main/java/org/icatproject/ids/FiniteStateMachine.java b/src/main/java/org/icatproject/ids/FiniteStateMachine.java index 07883e09..420f1304 100644 --- a/src/main/java/org/icatproject/ids/FiniteStateMachine.java +++ b/src/main/java/org/icatproject/ids/FiniteStateMachine.java @@ -19,11 +19,6 @@ import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; -import jakarta.annotation.PostConstruct; -import jakarta.annotation.PreDestroy; -import jakarta.ejb.DependsOn; -import jakarta.ejb.EJB; -import jakarta.ejb.Singleton; import jakarta.json.Json; import jakarta.json.stream.JsonGenerator; @@ -48,10 +43,34 @@ import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; -@Singleton -@DependsOn({"LockManager"}) public class FiniteStateMachine { + + private static FiniteStateMachine instance; + + private FiniteStateMachine(IcatReader reader, LockManager lockManager) { + this.reader = reader; + this.lockManager = lockManager; + } + + public static void createInstance(IcatReader reader, LockManager lockManager) { + + if(instance == null) { + instance = new FiniteStateMachine(reader, lockManager); + } + } + + public static FiniteStateMachine getInstance() { + if(instance != null) { + return instance; + } + + // If this assert was executed: Instance of FiniteStateMachine is not created. At First createInstance() has to be called at least once. 
+ assert(false); + return null; + + } + private class DfProcessQueue extends TimerTask { @Override @@ -307,10 +326,10 @@ public enum RequestedState { private long processQueueIntervalMillis; private PropertyHandler propertyHandler; - @EJB + IcatReader reader; - @EJB + private LockManager lockManager; private StorageUnit storageUnit; @@ -323,8 +342,7 @@ public enum RequestedState { private Set failures = ConcurrentHashMap.newKeySet(); - @PreDestroy - private void exit() { + public void exit() { timer.cancel(); logger.info("Cancelled timer"); } @@ -453,8 +471,7 @@ public String getServiceStatus() throws InternalException { return baos.toString(); } - @PostConstruct - private void init() { + public void init() { try { propertyHandler = PropertyHandler.getInstance(); processQueueIntervalMillis = propertyHandler.getProcessQueueIntervalSeconds() * 1000L; diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 8d3a24ff..95ed779e 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -353,8 +353,7 @@ public static void validateUUID(String thing, String id) throws BadRequestExcept private boolean enableWrite; - @EJB - private FiniteStateMachine fsm; + private FiniteStateMachine fsm = null; @EJB private LockManager lockManager; @@ -1065,6 +1064,8 @@ private void init() { try { synchronized (inited) { logger.info("creating IdsBean"); + + this.fsm = FiniteStateMachine.getInstance(); propertyHandler = PropertyHandler.getInstance(); mainStorage = propertyHandler.getMainStorage(); archiveStorage = propertyHandler.getArchiveStorage(); diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 91670706..d271fa02 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -54,15 +54,14 @@ public class IdsService { @EJB Transmitter transmitter; 
- @EJB - private FiniteStateMachine fsm; - @EJB private LockManager lockManager; @EJB private IcatReader reader; + private FiniteStateMachine fsm = null; + private RequestHandlerService requestService = null; /** @@ -133,6 +132,7 @@ public void delete(@Context HttpServletRequest request, @QueryParam("sessionId") @PreDestroy private void exit() { + this.fsm.exit(); logger.info("destroyed IdsService"); } @@ -387,8 +387,14 @@ public String getStatus(@Context HttpServletRequest request, @QueryParam("prepar @PostConstruct private void init() { logger.info("creating IdsService"); + + FiniteStateMachine.createInstance(reader, lockManager); + this.fsm = FiniteStateMachine.getInstance(); + this.fsm.init(); + this.requestService = new RequestHandlerService(); this.requestService.init(this.transmitter, this.lockManager, this.fsm, this.reader); + logger.info("created IdsService"); } diff --git a/src/main/java/org/icatproject/ids/Tidier.java b/src/main/java/org/icatproject/ids/Tidier.java index 8e898af0..5717a3ee 100644 --- a/src/main/java/org/icatproject/ids/Tidier.java +++ b/src/main/java/org/icatproject/ids/Tidier.java @@ -208,8 +208,7 @@ static void cleanPreparedDir(Path preparedDir, int preparedCount) throws IOExcep } } - @EJB - private FiniteStateMachine fsm; + private FiniteStateMachine fsm = null; private MainStorageInterface mainStorage; @@ -240,6 +239,7 @@ public void exit() { @PostConstruct public void init() { try { + this.fsm = FiniteStateMachine.getInstance(); PropertyHandler propertyHandler = PropertyHandler.getInstance(); sizeCheckIntervalMillis = propertyHandler.getSizeCheckIntervalMillis(); preparedCount = propertyHandler.getPreparedCount(); From 831cfa065f9fddb1ca9ac34440f44502e25be650 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Mon, 26 Feb 2024 14:20:12 +0100 Subject: [PATCH 17/92] Child classes for abstract FiniteStateMachine depending on StorageUnit --- .../org/icatproject/ids/DataSelection.java | 1 + .../icatproject/ids/FiniteStateMachine.java | 
652 ------------------ .../java/org/icatproject/ids/IdsBean.java | 2 + .../java/org/icatproject/ids/IdsService.java | 5 +- src/main/java/org/icatproject/ids/Tidier.java | 11 +- .../icatproject/ids/thread/DfArchiver.java | 3 +- .../org/icatproject/ids/thread/DfDeleter.java | 3 +- .../icatproject/ids/thread/DfRestorer.java | 3 +- .../org/icatproject/ids/thread/DfWriter.java | 3 +- .../icatproject/ids/thread/DsArchiver.java | 3 +- .../icatproject/ids/thread/DsRestorer.java | 2 +- .../org/icatproject/ids/thread/DsWriter.java | 2 +- .../DataSelectionForSingleLevelStorage.java | 2 +- .../DataSelectionForStorageUnitDatafile.java | 2 +- .../DataSelectionForStorageUnitDataset.java | 2 +- .../ids/v3/DataSelectionV3Base.java | 2 +- .../FiniteStateMachine.java | 292 ++++++++ ...niteStateMachineForSingleLevelStorage.java | 27 + ...iteStateMachineForStorageUnitDatafile.java | 267 +++++++ ...niteStateMachineForStorageUnitDataset.java | 194 ++++++ .../ids/v3/RequestHandlerService.java | 2 +- .../icatproject/ids/v3/ServiceProvider.java | 2 +- .../ids/v3/UnfinishedWorkServiceBase.java | 2 +- .../ids/{ => v3/enums}/DeferredOp.java | 2 +- .../ids/v3/handlers/GetDataHandler.java | 1 - 25 files changed, 811 insertions(+), 676 deletions(-) delete mode 100644 src/main/java/org/icatproject/ids/FiniteStateMachine.java create mode 100644 src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java create mode 100644 src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java create mode 100644 src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java create mode 100644 src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java rename src/main/java/org/icatproject/ids/{ => v3/enums}/DeferredOp.java (87%) diff --git a/src/main/java/org/icatproject/ids/DataSelection.java b/src/main/java/org/icatproject/ids/DataSelection.java index 60421ab0..29c9f93e 
100644 --- a/src/main/java/org/icatproject/ids/DataSelection.java +++ b/src/main/java/org/icatproject/ids/DataSelection.java @@ -29,6 +29,7 @@ import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; diff --git a/src/main/java/org/icatproject/ids/FiniteStateMachine.java b/src/main/java/org/icatproject/ids/FiniteStateMachine.java deleted file mode 100644 index 420f1304..00000000 --- a/src/main/java/org/icatproject/ids/FiniteStateMachine.java +++ /dev/null @@ -1,652 +0,0 @@ -package org.icatproject.ids; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.nio.file.FileAlreadyExistsException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.ConcurrentHashMap; - -import jakarta.json.Json; -import jakarta.json.stream.JsonGenerator; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.icatproject.Dataset; -import org.icatproject.ids.LockManager.Lock; -import org.icatproject.ids.LockManager.LockInfo; -import org.icatproject.ids.LockManager.LockType; -import org.icatproject.ids.exceptions.InternalException; -import org.icatproject.ids.plugin.AlreadyLockedException; -import org.icatproject.ids.plugin.DfInfo; -import org.icatproject.ids.plugin.DsInfo; -import org.icatproject.ids.thread.DfArchiver; -import org.icatproject.ids.thread.DfDeleter; -import org.icatproject.ids.thread.DfRestorer; -import 
org.icatproject.ids.thread.DfWriter; -import org.icatproject.ids.thread.DsArchiver; -import org.icatproject.ids.thread.DsRestorer; -import org.icatproject.ids.thread.DsWriter; -import org.icatproject.ids.v3.models.DataFileInfo; -import org.icatproject.ids.v3.models.DataSetInfo; - -public class FiniteStateMachine { - - - private static FiniteStateMachine instance; - - private FiniteStateMachine(IcatReader reader, LockManager lockManager) { - this.reader = reader; - this.lockManager = lockManager; - } - - public static void createInstance(IcatReader reader, LockManager lockManager) { - - if(instance == null) { - instance = new FiniteStateMachine(reader, lockManager); - } - } - - public static FiniteStateMachine getInstance() { - if(instance != null) { - return instance; - } - - // If this assert was executed: Instance of FiniteStateMachine is not created. At First createInstance() has to be called at least once. - assert(false); - return null; - - } - - private class DfProcessQueue extends TimerTask { - - @Override - public void run() { - try { - synchronized (deferredDfOpsQueue) { - if (processOpsTime != null && System.currentTimeMillis() > processOpsTime && !deferredDfOpsQueue.isEmpty()) { - processOpsTime = null; - logger.debug("deferredDfOpsQueue has " + deferredDfOpsQueue.size() + " entries"); - List writes = new ArrayList<>(); - List archives = new ArrayList<>(); - List restores = new ArrayList<>(); - List deletes = new ArrayList<>(); - Map writeLocks = new HashMap<>(); - Map archiveLocks = new HashMap<>(); - Map restoreLocks = new HashMap<>(); - Map deleteLocks = new HashMap<>(); - - Map newOps = new HashMap<>(); - final Iterator> it = deferredDfOpsQueue.entrySet().iterator(); - while (it.hasNext()) { - Entry opEntry = it.next(); - DataFileInfo dfInfo = opEntry.getKey(); - Long dsId = dfInfo.getDsId(); - DataSetInfo dsInfo; - try { - Dataset ds = (Dataset) reader.get("Dataset ds INCLUDE ds.investigation.facility", dsId); - dsInfo = new DataSetInfo(ds); - } 
catch (Exception e) { - logger.error("Could not get dsInfo {}: {}.", dsId, e.getMessage()); - continue; - } - if (!dfChanging.containsKey(dfInfo)) { - final RequestedState state = opEntry.getValue(); - logger.debug(dfInfo + " " + state); - if (state == RequestedState.WRITE_REQUESTED) { - if (!writeLocks.containsKey(dsId)) { - try { - writeLocks.put(dsId, lockManager.lock(dsInfo, LockType.SHARED)); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); - continue; - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsId); - continue; - } - } - it.remove(); - dfChanging.put(dfInfo, state); - writes.add(dfInfo); - } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { - if (!writeLocks.containsKey(dsId)) { - try { - writeLocks.put(dsId, lockManager.lock(dsInfo, LockType.SHARED)); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); - continue; - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsId); - continue; - } - } - it.remove(); - dfChanging.put(dfInfo, RequestedState.WRITE_REQUESTED); - writes.add(dfInfo); - newOps.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); - } else if (state == RequestedState.ARCHIVE_REQUESTED) { - if (!archiveLocks.containsKey(dsId)) { - try { - archiveLocks.put(dsId, lockManager.lock(dsInfo, LockType.EXCLUSIVE)); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); - continue; - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsId); - continue; - } - } - it.remove(); - dfChanging.put(dfInfo, state); - archives.add(dfInfo); - } else if (state == RequestedState.RESTORE_REQUESTED) { - if (!restoreLocks.containsKey(dsId)) { - try { - restoreLocks.put(dsId, lockManager.lock(dsInfo, LockType.EXCLUSIVE)); - } catch 
(AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); - continue; - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsId); - continue; - } - } - it.remove(); - dfChanging.put(dfInfo, state); - restores.add(dfInfo); - } else if (state == RequestedState.DELETE_REQUESTED) { - if (!deleteLocks.containsKey(dsId)) { - try { - deleteLocks.put(dsId, lockManager.lock(dsInfo, LockType.EXCLUSIVE)); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); - continue; - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsId); - continue; - } - } - it.remove(); - dfChanging.put(dfInfo, state); - deletes.add(dfInfo); - } else { - throw new AssertionError("Impossible state"); - } - } - } - if (!newOps.isEmpty()) { - deferredDfOpsQueue.putAll(newOps); - logger.debug("Adding {} operations to be scheduled next time round", newOps.size()); - } - if (!deferredDfOpsQueue.isEmpty()) { - processOpsTime = 0L; - } - if (!writes.isEmpty()) { - logger.debug("Launch thread to process " + writes.size() + " writes"); - Thread w = new Thread(new DfWriter(writes, propertyHandler, FiniteStateMachine.this, writeLocks.values())); - w.start(); - } - if (!archives.isEmpty()) { - logger.debug("Launch thread to process " + archives.size() + " archives"); - Thread w = new Thread(new DfArchiver(archives, propertyHandler, FiniteStateMachine.this, archiveLocks.values())); - w.start(); - } - if (!restores.isEmpty()) { - logger.debug("Launch thread to process " + restores.size() + " restores"); - Thread w = new Thread(new DfRestorer(restores, propertyHandler, FiniteStateMachine.this, restoreLocks.values())); - w.start(); - } - if (!deletes.isEmpty()) { - logger.debug("Launch thread to process " + deletes.size() + " deletes"); - Thread w = new Thread(new DfDeleter(deletes, propertyHandler, FiniteStateMachine.this, 
deleteLocks.values())); - w.start(); - } - } - } - } finally { - timer.schedule(new DfProcessQueue(), processQueueIntervalMillis); - } - - } - - } - - private class DsProcessQueue extends TimerTask { - - @Override - public void run() { - try { - synchronized (deferredDsOpsQueue) { - final long now = System.currentTimeMillis(); - Map newOps = new HashMap<>(); - final Iterator> it = deferredDsOpsQueue.entrySet().iterator(); - while (it.hasNext()) { - final Entry opEntry = it.next(); - final DataSetInfo dsInfo = opEntry.getKey(); - if (!dsChanging.containsKey(dsInfo)) { - final RequestedState state = opEntry.getValue(); - if (state == RequestedState.WRITE_REQUESTED - || state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { - if (now > writeTimes.get(dsInfo)) { - try { - Lock lock = lockManager.lock(dsInfo, LockType.SHARED); - logger.debug("Will process " + dsInfo + " with " + state); - writeTimes.remove(dsInfo); - dsChanging.put(dsInfo, RequestedState.WRITE_REQUESTED); - it.remove(); - final Thread w = new Thread( - new DsWriter(dsInfo, propertyHandler, FiniteStateMachine.this, reader, lock)); - w.start(); - if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { - newOps.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); - } - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsInfo + ", hold back process with " + state); - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsInfo); - } - } - } else if (state == RequestedState.ARCHIVE_REQUESTED) { - try { - Lock lock = lockManager.lock(dsInfo, LockType.EXCLUSIVE); - it.remove(); - long dsId = dsInfo.getId(); - logger.debug("Will process " + dsInfo + " with " + state); - dsChanging.put(dsInfo, state); - final Thread w = new Thread( - new DsArchiver(dsInfo, propertyHandler, FiniteStateMachine.this, lock)); - w.start(); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsInfo + ", hold back process with " + 
state); - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsInfo); - } - } else if (state == RequestedState.RESTORE_REQUESTED) { - try { - Lock lock = lockManager.lock(dsInfo, LockType.EXCLUSIVE); - logger.debug("Will process " + dsInfo + " with " + state); - dsChanging.put(dsInfo, state); - it.remove(); - final Thread w = new Thread( - new DsRestorer(dsInfo, propertyHandler, FiniteStateMachine.this, reader, lock)); - w.start(); - } catch (AlreadyLockedException e) { - logger.debug("Could not acquire lock on " + dsInfo + ", hold back process with " + state); - } catch (IOException e) { - logger.error("I/O exception " + e.getMessage() + " locking " + dsInfo); - } - } - } - } - deferredDsOpsQueue.putAll(newOps); - } - - } finally { - timer.schedule(new DsProcessQueue(), processQueueIntervalMillis); - } - - } - - } - - public enum RequestedState { - ARCHIVE_REQUESTED, DELETE_REQUESTED, RESTORE_REQUESTED, WRITE_REQUESTED, WRITE_THEN_ARCHIVE_REQUESTED - } - - private static Logger logger = LoggerFactory.getLogger(FiniteStateMachine.class); - - /* - * Note that the veriable processOpsDelayMillis is used to either delay all deferred - * datafile operations or to delay dataset writes, depending on the setting of storageUnit. 
- */ - private long processOpsDelayMillis; - - private Map deferredDfOpsQueue = new HashMap<>(); - - private Map deferredDsOpsQueue = new HashMap<>(); - - private Map dfChanging = new HashMap<>(); - - private Map dsChanging = new HashMap<>(); - - private Path markerDir; - private long processQueueIntervalMillis; - - private PropertyHandler propertyHandler; - - IcatReader reader; - - - private LockManager lockManager; - - private StorageUnit storageUnit; - - private Timer timer = new Timer("FSM Timer"); - - private Long processOpsTime; - - private Map writeTimes = new HashMap<>(); - - private Set failures = ConcurrentHashMap.newKeySet(); - - public void exit() { - timer.cancel(); - logger.info("Cancelled timer"); - } - - /** - * Find any DataFileInfo which may be offline - */ - public Set getDfMaybeOffline() { - Map union; - synchronized (deferredDfOpsQueue) { - union = new HashMap<>(dfChanging); - union.putAll(deferredDfOpsQueue); - } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { - if (entry.getValue() != RequestedState.WRITE_REQUESTED) { - result.add(entry.getKey()); - } - } - return result; - } - - /** - * Find any DataFileInfo which are being restored or are queued for restoration - */ - public Set getDfRestoring() { - Map union; - synchronized (deferredDfOpsQueue) { - union = new HashMap<>(dfChanging); - union.putAll(deferredDfOpsQueue); - } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { - if (entry.getValue() == RequestedState.RESTORE_REQUESTED) { - result.add(entry.getKey()); - } - } - return result; - } - - /** - * Find any DataSetInfo which may be offline - */ - public Set getDsMaybeOffline() { - Map union; - synchronized (deferredDsOpsQueue) { - union = new HashMap<>(dsChanging); - union.putAll(deferredDsOpsQueue); - } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { - if (entry.getValue() != RequestedState.WRITE_REQUESTED) { - result.add(entry.getKey()); - } - } - return result; 
- } - - /** - * Find any DsInfo which are being restored or are queued for restoration - */ - public Set getDsRestoring() { - Map union; - synchronized (deferredDsOpsQueue) { - union = new HashMap<>(dsChanging); - union.putAll(deferredDsOpsQueue); - } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { - if (entry.getValue() == RequestedState.RESTORE_REQUESTED) { - result.add(entry.getKey()); - } - } - return result; - } - - public String getServiceStatus() throws InternalException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - if (storageUnit == null) { - gen.writeStartArray("opsQueue").writeEnd(); - } else if (storageUnit == StorageUnit.DATASET) { - Map union; - synchronized (deferredDsOpsQueue) { - union = new HashMap<>(dsChanging); - union.putAll(deferredDsOpsQueue); - } - gen.writeStartArray("opsQueue"); - for (Entry entry : union.entrySet()) { - DsInfo item = entry.getKey(); - gen.writeStartObject().write("data", item.toString()).write("request", entry.getValue().name()) - .writeEnd(); - } - gen.writeEnd(); // end Array("opsQueue") - } else if (storageUnit == StorageUnit.DATAFILE) { - Map union; - synchronized (deferredDfOpsQueue) { - union = new HashMap<>(dfChanging); - union.putAll(deferredDfOpsQueue); - } - gen.writeStartArray("opsQueue"); - for (Entry entry : union.entrySet()) { - DfInfo item = entry.getKey(); - gen.writeStartObject().write("data", item.toString()).write("request", entry.getValue().name()) - .writeEnd(); - } - gen.writeEnd(); // end Array("opsQueue") - } - - Collection lockInfo = lockManager.getLockInfo(); - gen.write("lockCount", lockInfo.size()); - gen.writeStartArray("locks"); - for (LockInfo li : lockInfo) { - gen.writeStartObject().write("id", li.id).write("type", li.type.name()).write("count", li.count).writeEnd(); - } - gen.writeEnd(); // end Array("locks") - - gen.writeStartArray("failures"); - for (Long failure : 
failures) { - gen.write(failure); - } - gen.writeEnd(); // end Array("failures") - - gen.writeEnd(); // end Object() - } - return baos.toString(); - } - - public void init() { - try { - propertyHandler = PropertyHandler.getInstance(); - processQueueIntervalMillis = propertyHandler.getProcessQueueIntervalSeconds() * 1000L; - storageUnit = propertyHandler.getStorageUnit(); - if (storageUnit == StorageUnit.DATASET) { - processOpsDelayMillis = propertyHandler.getDelayDatasetWrites() * 1000L; - timer.schedule(new DsProcessQueue(), processQueueIntervalMillis); - logger.info("DsProcessQueue scheduled to run in " + processQueueIntervalMillis + " milliseconds"); - } else if (storageUnit == StorageUnit.DATAFILE) { - processOpsDelayMillis = propertyHandler.getDelayDatafileOperations() * 1000L; - timer.schedule(new DfProcessQueue(), processQueueIntervalMillis); - logger.info("DfProcessQueue scheduled to run in " + processQueueIntervalMillis + " milliseconds"); - } - markerDir = propertyHandler.getCacheDir().resolve("marker"); - Files.createDirectories(markerDir); - } catch (IOException e) { - throw new RuntimeException("FiniteStateMachine reports " + e.getClass() + " " + e.getMessage()); - } - } - - public void queue(DataFileInfo dfInfo, DeferredOp deferredOp) throws InternalException { - logger.info("Requesting " + deferredOp + " of datafile " + dfInfo); - - synchronized (deferredDfOpsQueue) { - - if (processOpsTime == null) { - processOpsTime = System.currentTimeMillis() + processOpsDelayMillis; - final Date d = new Date(processOpsTime); - logger.debug("Requesting delay operations till " + d); - } - - final RequestedState state = this.deferredDfOpsQueue.get(dfInfo); - if (state == null) { - if (deferredOp == DeferredOp.WRITE) { - try { - Path marker = markerDir.resolve(Long.toString(dfInfo.getId())); - Files.createFile(marker); - logger.debug("Created marker " + marker); - } catch (FileAlreadyExistsException e) { - // Pass will ignore this - } catch (IOException e) { - throw 
new InternalException(e.getClass() + " " + e.getMessage()); - } - deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); - } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); - } else if (deferredOp == DeferredOp.RESTORE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.RESTORE_REQUESTED); - } else if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); - } - } else if (state == RequestedState.ARCHIVE_REQUESTED) { - if (deferredOp == DeferredOp.RESTORE) { - deferredDfOpsQueue.remove(dfInfo); - } else if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); - } - } else if (state == RequestedState.DELETE_REQUESTED) { - // No way out - } else if (state == RequestedState.RESTORE_REQUESTED) { - if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); - } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); - } - } else if (state == RequestedState.WRITE_REQUESTED) { - if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.remove(dfInfo); - } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); - } - } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { - if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.remove(dfInfo); - } else if (deferredOp == DeferredOp.RESTORE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); - } - } - } - } - - public void queue(DataSetInfo dsInfo, DeferredOp deferredOp) throws InternalException { - logger.info("Requesting " + deferredOp + " of dataset " + dsInfo); - - synchronized (deferredDsOpsQueue) { - - final RequestedState state = this.deferredDsOpsQueue.get(dsInfo); - if (state == null) { - if (deferredOp == DeferredOp.WRITE) { - 
requestWrite(dsInfo); - } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); - } else if (deferredOp == DeferredOp.RESTORE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.RESTORE_REQUESTED); - } - } else if (state == RequestedState.ARCHIVE_REQUESTED) { - if (deferredOp == DeferredOp.WRITE) { - requestWrite(dsInfo); - deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); - } else if (deferredOp == DeferredOp.RESTORE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.RESTORE_REQUESTED); - } - } else if (state == RequestedState.RESTORE_REQUESTED) { - if (deferredOp == DeferredOp.WRITE) { - requestWrite(dsInfo); - } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); - } - } else if (state == RequestedState.WRITE_REQUESTED) { - if (deferredOp == DeferredOp.WRITE) { - setDelay(dsInfo); - } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); - } - } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { - if (deferredOp == DeferredOp.WRITE) { - setDelay(dsInfo); - } else if (deferredOp == DeferredOp.RESTORE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_REQUESTED); - } - } - } - - } - - public void removeFromChanging(DfInfo dfInfo) { - synchronized (deferredDfOpsQueue) { - dfChanging.remove(dfInfo); - } - } - - public void removeFromChanging(DsInfo dsInfo) { - synchronized (deferredDsOpsQueue) { - dsChanging.remove(dsInfo); - } - } - - private void requestWrite(DataSetInfo dsInfo) throws InternalException { - try { - Path marker = markerDir.resolve(Long.toString(dsInfo.getId())); - Files.createFile(marker); - logger.debug("Created marker " + marker); - } catch (FileAlreadyExistsException e) { - // Pass will ignore this - } catch (IOException e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } - 
deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_REQUESTED); - setDelay(dsInfo); - } - - private void setDelay(DsInfo dsInfo) { - writeTimes.put(dsInfo, System.currentTimeMillis() + processOpsDelayMillis); - if (logger.isDebugEnabled()) { - final Date d = new Date(writeTimes.get(dsInfo)); - logger.debug("Requesting delay of writing of dataset " + dsInfo + " till " + d); - } - } - - public void recordSuccess(Long id) { - if (failures.remove(id)) { - logger.debug("Marking {} OK", id); - } - } - - public void recordFailure(Long id) { - if (failures.add(id)) { - logger.debug("Marking {} as failure", id); - } - } - - public void checkFailure(Long id) throws InternalException { - if (failures.contains(id)) { - throw new InternalException("Restore failed"); - } - } - -} diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 95ed779e..2361597c 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -71,7 +71,9 @@ import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index d271fa02..81e56199 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -39,6 +39,7 @@ import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.RequestHandlerService; +import 
org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.ValueContainer; @@ -388,7 +389,7 @@ public String getStatus(@Context HttpServletRequest request, @QueryParam("prepar private void init() { logger.info("creating IdsService"); - FiniteStateMachine.createInstance(reader, lockManager); + FiniteStateMachine.createInstance(reader, lockManager, PropertyHandler.getInstance().getStorageUnit()); this.fsm = FiniteStateMachine.getInstance(); this.fsm.init(); @@ -426,7 +427,7 @@ public boolean isPrepared(@Context HttpServletRequest request, @QueryParam("prep } /** - * An ids server can be configured to be read only. This returns the + * An ids server can be configured to be read only. This returns the * readOnly status of the server. * * @return true if readonly, else false diff --git a/src/main/java/org/icatproject/ids/Tidier.java b/src/main/java/org/icatproject/ids/Tidier.java index 5717a3ee..eaccdf44 100644 --- a/src/main/java/org/icatproject/ids/Tidier.java +++ b/src/main/java/org/icatproject/ids/Tidier.java @@ -29,6 +29,8 @@ import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; @@ -217,6 +219,9 @@ static void cleanPreparedDir(Path preparedDir, int preparedCount) throws IOExcep @EJB IcatReader reader; + @EJB + private LockManager lockManager; + private long sizeCheckIntervalMillis; private long startArchivingLevel; private long stopArchivingLevel; @@ -239,8 +244,12 @@ public void exit() { @PostConstruct public void init() { try { - this.fsm = FiniteStateMachine.getInstance(); + PropertyHandler propertyHandler = PropertyHandler.getInstance(); + 
FiniteStateMachine.createInstance(reader, lockManager, propertyHandler.getStorageUnit()); + this.fsm = FiniteStateMachine.getInstance(); + this.fsm.init(); //if not yet initialized by IdsService do it now + sizeCheckIntervalMillis = propertyHandler.getSizeCheckIntervalMillis(); preparedCount = propertyHandler.getPreparedCount(); preparedDir = propertyHandler.getCacheDir().resolve("prepared"); diff --git a/src/main/java/org/icatproject/ids/thread/DfArchiver.java b/src/main/java/org/icatproject/ids/thread/DfArchiver.java index cebdbb11..2cce1919 100644 --- a/src/main/java/org/icatproject/ids/thread/DfArchiver.java +++ b/src/main/java/org/icatproject/ids/thread/DfArchiver.java @@ -7,12 +7,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; /* * Removes datafiles from the fast storage (doesn't write them to archive storage) diff --git a/src/main/java/org/icatproject/ids/thread/DfDeleter.java b/src/main/java/org/icatproject/ids/thread/DfDeleter.java index 71b8a569..2a00c429 100644 --- a/src/main/java/org/icatproject/ids/thread/DfDeleter.java +++ b/src/main/java/org/icatproject/ids/thread/DfDeleter.java @@ -5,12 +5,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.plugin.DfInfo; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; /** * Delete datafiles from archive diff --git a/src/main/java/org/icatproject/ids/thread/DfRestorer.java b/src/main/java/org/icatproject/ids/thread/DfRestorer.java index 
819922b5..6cab48d8 100644 --- a/src/main/java/org/icatproject/ids/thread/DfRestorer.java +++ b/src/main/java/org/icatproject/ids/thread/DfRestorer.java @@ -7,13 +7,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; /* * Restores datafiles from the slow to the fast storage. diff --git a/src/main/java/org/icatproject/ids/thread/DfWriter.java b/src/main/java/org/icatproject/ids/thread/DfWriter.java index d22baf49..1ed1373b 100644 --- a/src/main/java/org/icatproject/ids/thread/DfWriter.java +++ b/src/main/java/org/icatproject/ids/thread/DfWriter.java @@ -8,13 +8,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; /** * Copies datafiles from main to archive diff --git a/src/main/java/org/icatproject/ids/thread/DsArchiver.java b/src/main/java/org/icatproject/ids/thread/DsArchiver.java index 3bbed5df..d2182753 100644 --- a/src/main/java/org/icatproject/ids/thread/DsArchiver.java +++ b/src/main/java/org/icatproject/ids/thread/DsArchiver.java @@ -5,12 +5,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.DsInfo; import 
org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; /* * Removes datasets from the fast storage (doesn't write them to slow storage) diff --git a/src/main/java/org/icatproject/ids/thread/DsRestorer.java b/src/main/java/org/icatproject/ids/thread/DsRestorer.java index 8432670d..c68fedab 100644 --- a/src/main/java/org/icatproject/ids/thread/DsRestorer.java +++ b/src/main/java/org/icatproject/ids/thread/DsRestorer.java @@ -16,7 +16,6 @@ import org.icatproject.Datafile; import org.icatproject.Dataset; -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.IcatReader; import org.icatproject.ids.IdsBean; import org.icatproject.ids.LockManager.Lock; @@ -25,6 +24,7 @@ import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; /* * Restores datafiles from the slow to the fast storage. 
diff --git a/src/main/java/org/icatproject/ids/thread/DsWriter.java b/src/main/java/org/icatproject/ids/thread/DsWriter.java index d196181b..d69d9d5d 100644 --- a/src/main/java/org/icatproject/ids/thread/DsWriter.java +++ b/src/main/java/org/icatproject/ids/thread/DsWriter.java @@ -15,7 +15,6 @@ import org.icatproject.Datafile; import org.icatproject.Dataset; import org.icatproject.ids.DfInfoImpl; -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.IcatReader; import org.icatproject.ids.IdsBean; import org.icatproject.ids.LockManager.Lock; @@ -24,6 +23,7 @@ import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; /** * Copies dataset from main to archive diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java index f66fad4d..c29e94e0 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java @@ -4,10 +4,10 @@ import java.util.Map; import java.util.Set; -import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java index b00a4d56..e86da9cd 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java +++ 
b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -4,10 +4,10 @@ import java.util.Map; import java.util.Set; -import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java index de6a9371..8eb7b57a 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -4,10 +4,10 @@ import java.util.Map; import java.util.Set; -import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java index 4971144e..a6ec4ed6 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -7,11 +7,11 @@ import java.util.Map; import java.util.Set; -import org.icatproject.ids.DeferredOp; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; 
import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java new file mode 100644 index 00000000..08983488 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java @@ -0,0 +1,292 @@ +package org.icatproject.ids.v3.FiniteStateMachine; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.Timer; +import java.util.concurrent.ConcurrentHashMap; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.icatproject.ids.IcatReader; +import org.icatproject.ids.LockManager; +import org.icatproject.ids.PropertyHandler; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.LockManager.LockInfo; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.plugin.DfInfo; +import org.icatproject.ids.plugin.DsInfo; +import org.icatproject.ids.v3.enums.DeferredOp; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.DataSetInfo; + +public abstract class FiniteStateMachine { + + + private static FiniteStateMachine instance; + + private static Boolean inited = false; + + protected FiniteStateMachine(IcatReader reader, LockManager lockManager) { + this.reader = reader; + this.lockManager = lockManager; + } + + 
public static void createInstance(IcatReader reader, LockManager lockManager, StorageUnit storageUnit) { + + if(instance == null) { + if(storageUnit == StorageUnit.DATAFILE) + instance = new FiniteStateMachineForStorageUnitDatafile(reader, lockManager); + else if(storageUnit == StorageUnit.DATASET) + instance = new FiniteStateMachineForStorageUnitDataset(reader, lockManager); + else + instance = new FiniteStateMachineForSingleLevelStorage(reader, lockManager); + } + } + + public static FiniteStateMachine getInstance() { + if(instance != null) { + return instance; + } + + // No instance has been created yet: createInstance() has to be called at least once before getInstance(). + throw new RuntimeException("Instance of FiniteStateMachine is not created. At First createInstance() has to be called at least once."); + } + + public enum RequestedState { + ARCHIVE_REQUESTED, DELETE_REQUESTED, RESTORE_REQUESTED, WRITE_REQUESTED, WRITE_THEN_ARCHIVE_REQUESTED + } + + protected static Logger logger = LoggerFactory.getLogger(FiniteStateMachine.class); + + /* + * Note that the variable processOpsDelayMillis is used to either delay all deferred + * datafile operations or to delay dataset writes, depending on the setting of storageUnit. 
+ */ + protected long processOpsDelayMillis; + + protected Map deferredDfOpsQueue = new HashMap<>(); + + protected Map deferredDsOpsQueue = new HashMap<>(); + + protected Map dfChanging = new HashMap<>(); + + protected Map dsChanging = new HashMap<>(); + + protected Path markerDir; + protected long processQueueIntervalMillis; + + protected PropertyHandler propertyHandler; + + protected IcatReader reader; + + + protected LockManager lockManager; + + protected StorageUnit storageUnit; + + protected Timer timer = new Timer("FSM Timer"); + + protected Long processOpsTime; + + protected Map writeTimes = new HashMap<>(); + + protected Set failures = ConcurrentHashMap.newKeySet(); + + public void exit() { + timer.cancel(); + logger.info("Cancelled timer"); + } + + /** + * Find any DataFileInfo which may be offline + */ + public Set getDfMaybeOffline() { + Map union; + synchronized (deferredDfOpsQueue) { + union = new HashMap<>(dfChanging); + union.putAll(deferredDfOpsQueue); + } + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { + if (entry.getValue() != RequestedState.WRITE_REQUESTED) { + result.add(entry.getKey()); + } + } + return result; + } + + /** + * Find any DataFileInfo which are being restored or are queued for restoration + */ + public Set getDfRestoring() { + Map union; + synchronized (deferredDfOpsQueue) { + union = new HashMap<>(dfChanging); + union.putAll(deferredDfOpsQueue); + } + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { + if (entry.getValue() == RequestedState.RESTORE_REQUESTED) { + result.add(entry.getKey()); + } + } + return result; + } + + /** + * Find any DataSetInfo which may be offline + */ + public Set getDsMaybeOffline() { + Map union; + synchronized (deferredDsOpsQueue) { + union = new HashMap<>(dsChanging); + union.putAll(deferredDsOpsQueue); + } + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { + if (entry.getValue() != RequestedState.WRITE_REQUESTED) { + 
result.add(entry.getKey()); + } + } + return result; + } + + /** + * Find any DsInfo which are being restored or are queued for restoration + */ + public Set getDsRestoring() { + Map union; + synchronized (deferredDsOpsQueue) { + union = new HashMap<>(dsChanging); + union.putAll(deferredDsOpsQueue); + } + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { + if (entry.getValue() == RequestedState.RESTORE_REQUESTED) { + result.add(entry.getKey()); + } + } + return result; + } + + public String getServiceStatus() throws InternalException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + if (storageUnit == null) { + gen.writeStartArray("opsQueue").writeEnd(); + } else if (storageUnit == StorageUnit.DATASET) { + Map union; + synchronized (deferredDsOpsQueue) { + union = new HashMap<>(dsChanging); + union.putAll(deferredDsOpsQueue); + } + gen.writeStartArray("opsQueue"); + for (Entry entry : union.entrySet()) { + DsInfo item = entry.getKey(); + gen.writeStartObject().write("data", item.toString()).write("request", entry.getValue().name()) + .writeEnd(); + } + gen.writeEnd(); // end Array("opsQueue") + } else if (storageUnit == StorageUnit.DATAFILE) { + Map union; + synchronized (deferredDfOpsQueue) { + union = new HashMap<>(dfChanging); + union.putAll(deferredDfOpsQueue); + } + gen.writeStartArray("opsQueue"); + for (Entry entry : union.entrySet()) { + DfInfo item = entry.getKey(); + gen.writeStartObject().write("data", item.toString()).write("request", entry.getValue().name()) + .writeEnd(); + } + gen.writeEnd(); // end Array("opsQueue") + } + + Collection lockInfo = lockManager.getLockInfo(); + gen.write("lockCount", lockInfo.size()); + gen.writeStartArray("locks"); + for (LockInfo li : lockInfo) { + gen.writeStartObject().write("id", li.id).write("type", li.type.name()).write("count", li.count).writeEnd(); + } + gen.writeEnd(); // end Array("locks") + + 
gen.writeStartArray("failures"); + for (Long failure : failures) { + gen.write(failure); + } + gen.writeEnd(); // end Array("failures") + + gen.writeEnd(); // end Object() + } + return baos.toString(); + } + + public abstract void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException; + protected abstract void scheduleTimer(); + + public void init() { + + try { + synchronized (inited) { + if(!inited) { + propertyHandler = PropertyHandler.getInstance(); + processQueueIntervalMillis = propertyHandler.getProcessQueueIntervalSeconds() * 1000L; + storageUnit = propertyHandler.getStorageUnit(); + + this.scheduleTimer(); + + markerDir = propertyHandler.getCacheDir().resolve("marker"); + Files.createDirectories(markerDir); + inited = true; + } + } + } catch (IOException e) { + throw new RuntimeException("FiniteStateMachine reports " + e.getClass() + " " + e.getMessage()); + } + + } + + public void removeFromChanging(DfInfo dfInfo) { + synchronized (deferredDfOpsQueue) { + dfChanging.remove(dfInfo); + } + } + + public void removeFromChanging(DsInfo dsInfo) { + synchronized (deferredDsOpsQueue) { + dsChanging.remove(dsInfo); + } + } + + public void recordSuccess(Long id) { + if (failures.remove(id)) { + logger.debug("Marking {} OK", id); + } + } + + public void recordFailure(Long id) { + if (failures.add(id)) { + logger.debug("Marking {} as failure", id); + } + } + + public void checkFailure(Long id) throws InternalException { + if (failures.contains(id)) { + throw new InternalException("Restore failed"); + } + } + +} diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java new file mode 100644 index 00000000..479b5aa3 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java @@ -0,0 +1,27 @@ +package 
org.icatproject.ids.v3.FiniteStateMachine; + +import org.icatproject.ids.IcatReader; +import org.icatproject.ids.LockManager; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.v3.enums.DeferredOp; +import org.icatproject.ids.v3.models.DataInfoBase; + +public class FiniteStateMachineForSingleLevelStorage extends FiniteStateMachine { + + protected FiniteStateMachineForSingleLevelStorage(IcatReader reader, LockManager lockManager) { + super(reader, lockManager); + } + + + @Override + public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException { + throw new InternalException("### Operation is not permitted for single level storage"); + } + + + @Override + protected void scheduleTimer() { + //nothing to do here for single level storage + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java new file mode 100644 index 00000000..754bdb3d --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java @@ -0,0 +1,267 @@ +package org.icatproject.ids.v3.FiniteStateMachine; + +import java.io.IOException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import java.util.TimerTask; + +import org.icatproject.Dataset; +import org.icatproject.ids.IcatReader; +import org.icatproject.ids.LockManager; +import org.icatproject.ids.LockManager.Lock; +import org.icatproject.ids.LockManager.LockType; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.plugin.AlreadyLockedException; +import 
org.icatproject.ids.plugin.DfInfo; +import org.icatproject.ids.thread.DfArchiver; +import org.icatproject.ids.thread.DfDeleter; +import org.icatproject.ids.thread.DfRestorer; +import org.icatproject.ids.thread.DfWriter; +import org.icatproject.ids.v3.enums.DeferredOp; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.DataSetInfo; + +public class FiniteStateMachineForStorageUnitDatafile extends FiniteStateMachine { + + protected FiniteStateMachineForStorageUnitDatafile(IcatReader icatReader, LockManager lockManager) { + super(icatReader, lockManager); + } + + + @Override + protected void scheduleTimer() { + processOpsDelayMillis = propertyHandler.getDelayDatafileOperations() * 1000L; + timer.schedule(new DfProcessQueue(), processQueueIntervalMillis); + logger.info("DfProcessQueue scheduled to run in " + processQueueIntervalMillis + " milliseconds"); + } + + + public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException { + + var dfInfo = (DataFileInfo) dataInfo; + if(dfInfo == null) throw new InternalException("DataInfoBase object could not be cast into a DataFileInfo. 
Did you hand over a DataSetInfo instead?"); + + logger.info("Requesting " + deferredOp + " of datafile " + dfInfo); + + synchronized (deferredDfOpsQueue) { + + if (processOpsTime == null) { + processOpsTime = System.currentTimeMillis() + processOpsDelayMillis; + final Date d = new Date(processOpsTime); + logger.debug("Requesting delay operations till " + d); + } + + final RequestedState state = this.deferredDfOpsQueue.get(dfInfo); + if (state == null) { + if (deferredOp == DeferredOp.WRITE) { + try { + Path marker = markerDir.resolve(Long.toString(dfInfo.getId())); + Files.createFile(marker); + logger.debug("Created marker " + marker); + } catch (FileAlreadyExistsException e) { + // Pass will ignore this + } catch (IOException e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); + } else if (deferredOp == DeferredOp.ARCHIVE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); + } else if (deferredOp == DeferredOp.RESTORE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.RESTORE_REQUESTED); + } else if (deferredOp == DeferredOp.DELETE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); + } + } else if (state == RequestedState.ARCHIVE_REQUESTED) { + if (deferredOp == DeferredOp.RESTORE) { + deferredDfOpsQueue.remove(dfInfo); + } else if (deferredOp == DeferredOp.DELETE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); + } + } else if (state == RequestedState.DELETE_REQUESTED) { + // No way out + } else if (state == RequestedState.RESTORE_REQUESTED) { + if (deferredOp == DeferredOp.DELETE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); + } else if (deferredOp == DeferredOp.ARCHIVE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); + } + } else if (state == RequestedState.WRITE_REQUESTED) { + if (deferredOp == DeferredOp.DELETE) { + deferredDfOpsQueue.remove(dfInfo); + } else if 
(deferredOp == DeferredOp.ARCHIVE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); + } + } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { + if (deferredOp == DeferredOp.DELETE) { + deferredDfOpsQueue.remove(dfInfo); + } else if (deferredOp == DeferredOp.RESTORE) { + deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); + } + } + } + } + + + private class DfProcessQueue extends TimerTask { + + @Override + public void run() { + try { + synchronized (deferredDfOpsQueue) { + if (processOpsTime != null && System.currentTimeMillis() > processOpsTime && !deferredDfOpsQueue.isEmpty()) { + processOpsTime = null; + logger.debug("deferredDfOpsQueue has " + deferredDfOpsQueue.size() + " entries"); + List writes = new ArrayList<>(); + List archives = new ArrayList<>(); + List restores = new ArrayList<>(); + List deletes = new ArrayList<>(); + Map writeLocks = new HashMap<>(); + Map archiveLocks = new HashMap<>(); + Map restoreLocks = new HashMap<>(); + Map deleteLocks = new HashMap<>(); + + Map newOps = new HashMap<>(); + final Iterator> it = deferredDfOpsQueue.entrySet().iterator(); + while (it.hasNext()) { + Entry opEntry = it.next(); + DataFileInfo dfInfo = opEntry.getKey(); + Long dsId = dfInfo.getDsId(); + DataSetInfo dsInfo; + try { + Dataset ds = (Dataset) reader.get("Dataset ds INCLUDE ds.investigation.facility", dsId); + dsInfo = new DataSetInfo(ds); + } catch (Exception e) { + logger.error("Could not get dsInfo {}: {}.", dsId, e.getMessage()); + continue; + } + if (!dfChanging.containsKey(dfInfo)) { + final RequestedState state = opEntry.getValue(); + logger.debug(dfInfo + " " + state); + if (state == RequestedState.WRITE_REQUESTED) { + if (!writeLocks.containsKey(dsId)) { + try { + writeLocks.put(dsId, lockManager.lock(dsInfo, LockType.SHARED)); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); + continue; + } catch (IOException e) { + 
logger.error("I/O exception " + e.getMessage() + " locking " + dsId); + continue; + } + } + it.remove(); + dfChanging.put(dfInfo, state); + writes.add(dfInfo); + } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { + if (!writeLocks.containsKey(dsId)) { + try { + writeLocks.put(dsId, lockManager.lock(dsInfo, LockType.SHARED)); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); + continue; + } catch (IOException e) { + logger.error("I/O exception " + e.getMessage() + " locking " + dsId); + continue; + } + } + it.remove(); + dfChanging.put(dfInfo, RequestedState.WRITE_REQUESTED); + writes.add(dfInfo); + newOps.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); + } else if (state == RequestedState.ARCHIVE_REQUESTED) { + if (!archiveLocks.containsKey(dsId)) { + try { + archiveLocks.put(dsId, lockManager.lock(dsInfo, LockType.EXCLUSIVE)); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); + continue; + } catch (IOException e) { + logger.error("I/O exception " + e.getMessage() + " locking " + dsId); + continue; + } + } + it.remove(); + dfChanging.put(dfInfo, state); + archives.add(dfInfo); + } else if (state == RequestedState.RESTORE_REQUESTED) { + if (!restoreLocks.containsKey(dsId)) { + try { + restoreLocks.put(dsId, lockManager.lock(dsInfo, LockType.EXCLUSIVE)); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on " + dsId + ", hold back " + state); + continue; + } catch (IOException e) { + logger.error("I/O exception " + e.getMessage() + " locking " + dsId); + continue; + } + } + it.remove(); + dfChanging.put(dfInfo, state); + restores.add(dfInfo); + } else if (state == RequestedState.DELETE_REQUESTED) { + if (!deleteLocks.containsKey(dsId)) { + try { + deleteLocks.put(dsId, lockManager.lock(dsInfo, LockType.EXCLUSIVE)); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on 
" + dsId + ", hold back " + state); + continue; + } catch (IOException e) { + logger.error("I/O exception " + e.getMessage() + " locking " + dsId); + continue; + } + } + it.remove(); + dfChanging.put(dfInfo, state); + deletes.add(dfInfo); + } else { + throw new AssertionError("Impossible state"); + } + } + } + if (!newOps.isEmpty()) { + deferredDfOpsQueue.putAll(newOps); + logger.debug("Adding {} operations to be scheduled next time round", newOps.size()); + } + if (!deferredDfOpsQueue.isEmpty()) { + processOpsTime = 0L; + } + if (!writes.isEmpty()) { + logger.debug("Launch thread to process " + writes.size() + " writes"); + Thread w = new Thread(new DfWriter(writes, propertyHandler, FiniteStateMachineForStorageUnitDatafile.this, writeLocks.values())); + w.start(); + } + if (!archives.isEmpty()) { + logger.debug("Launch thread to process " + archives.size() + " archives"); + Thread w = new Thread(new DfArchiver(archives, propertyHandler, FiniteStateMachineForStorageUnitDatafile.this, archiveLocks.values())); + w.start(); + } + if (!restores.isEmpty()) { + logger.debug("Launch thread to process " + restores.size() + " restores"); + Thread w = new Thread(new DfRestorer(restores, propertyHandler, FiniteStateMachineForStorageUnitDatafile.this, restoreLocks.values())); + w.start(); + } + if (!deletes.isEmpty()) { + logger.debug("Launch thread to process " + deletes.size() + " deletes"); + Thread w = new Thread(new DfDeleter(deletes, propertyHandler, FiniteStateMachineForStorageUnitDatafile.this, deleteLocks.values())); + w.start(); + } + } + } + } finally { + timer.schedule(new DfProcessQueue(), processQueueIntervalMillis); + } + + } + + } +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java new file mode 100644 index 00000000..b333a2ab --- /dev/null +++ 
b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java @@ -0,0 +1,194 @@ +package org.icatproject.ids.v3.FiniteStateMachine; + +import java.io.IOException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; +import java.util.TimerTask; + +import org.icatproject.ids.IcatReader; +import org.icatproject.ids.LockManager; +import org.icatproject.ids.LockManager.Lock; +import org.icatproject.ids.LockManager.LockType; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.plugin.AlreadyLockedException; +import org.icatproject.ids.plugin.DsInfo; +import org.icatproject.ids.thread.DsArchiver; +import org.icatproject.ids.thread.DsRestorer; +import org.icatproject.ids.thread.DsWriter; +import org.icatproject.ids.v3.enums.DeferredOp; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.DataSetInfo; + +public class FiniteStateMachineForStorageUnitDataset extends FiniteStateMachine { + + protected FiniteStateMachineForStorageUnitDataset(IcatReader reader, LockManager lockManager) { + super(reader, lockManager); + } + + + @Override + protected void scheduleTimer() { + processOpsDelayMillis = propertyHandler.getDelayDatasetWrites() * 1000L; + timer.schedule(new DsProcessQueue(), processQueueIntervalMillis); + logger.info("DsProcessQueue scheduled to run in " + processQueueIntervalMillis + " milliseconds"); + } + + + public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException { + + var dsInfo = (DataSetInfo) dataInfo; + if(dsInfo == null) throw new InternalException("DataInfoBase object could not be cast into a DataSetInfo. 
Did you hand over a DataFileInfo instead?"); + + + logger.info("Requesting " + deferredOp + " of dataset " + dsInfo); + + synchronized (deferredDsOpsQueue) { + + final RequestedState state = this.deferredDsOpsQueue.get(dsInfo); + if (state == null) { + if (deferredOp == DeferredOp.WRITE) { + requestWrite(dsInfo); + } else if (deferredOp == DeferredOp.ARCHIVE) { + deferredDsOpsQueue.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); + } else if (deferredOp == DeferredOp.RESTORE) { + deferredDsOpsQueue.put(dsInfo, RequestedState.RESTORE_REQUESTED); + } + } else if (state == RequestedState.ARCHIVE_REQUESTED) { + if (deferredOp == DeferredOp.WRITE) { + requestWrite(dsInfo); + deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); + } else if (deferredOp == DeferredOp.RESTORE) { + deferredDsOpsQueue.put(dsInfo, RequestedState.RESTORE_REQUESTED); + } + } else if (state == RequestedState.RESTORE_REQUESTED) { + if (deferredOp == DeferredOp.WRITE) { + requestWrite(dsInfo); + } else if (deferredOp == DeferredOp.ARCHIVE) { + deferredDsOpsQueue.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); + } + } else if (state == RequestedState.WRITE_REQUESTED) { + if (deferredOp == DeferredOp.WRITE) { + setDelay(dsInfo); + } else if (deferredOp == DeferredOp.ARCHIVE) { + deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); + } + } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { + if (deferredOp == DeferredOp.WRITE) { + setDelay(dsInfo); + } else if (deferredOp == DeferredOp.RESTORE) { + deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_REQUESTED); + } + } + } + + } + + + private void requestWrite(DataSetInfo dsInfo) throws InternalException { + try { + Path marker = markerDir.resolve(Long.toString(dsInfo.getId())); + Files.createFile(marker); + logger.debug("Created marker " + marker); + } catch (FileAlreadyExistsException e) { + // Pass will ignore this + } catch (IOException e) { + throw new InternalException(e.getClass() + " 
" + e.getMessage()); + } + deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_REQUESTED); + setDelay(dsInfo); + } + + + private void setDelay(DsInfo dsInfo) { + writeTimes.put(dsInfo, System.currentTimeMillis() + processOpsDelayMillis); + if (logger.isDebugEnabled()) { + final Date d = new Date(writeTimes.get(dsInfo)); + logger.debug("Requesting delay of writing of dataset " + dsInfo + " till " + d); + } + } + + + private class DsProcessQueue extends TimerTask { + + @Override + public void run() { + try { + synchronized (deferredDsOpsQueue) { + final long now = System.currentTimeMillis(); + Map newOps = new HashMap<>(); + final Iterator> it = deferredDsOpsQueue.entrySet().iterator(); + while (it.hasNext()) { + final Entry opEntry = it.next(); + final DataSetInfo dsInfo = opEntry.getKey(); + if (!dsChanging.containsKey(dsInfo)) { + final RequestedState state = opEntry.getValue(); + if (state == RequestedState.WRITE_REQUESTED + || state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { + if (now > writeTimes.get(dsInfo)) { + try { + Lock lock = lockManager.lock(dsInfo, LockType.SHARED); + logger.debug("Will process " + dsInfo + " with " + state); + writeTimes.remove(dsInfo); + dsChanging.put(dsInfo, RequestedState.WRITE_REQUESTED); + it.remove(); + final Thread w = new Thread( + new DsWriter(dsInfo, propertyHandler, FiniteStateMachineForStorageUnitDataset.this, reader, lock)); + w.start(); + if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { + newOps.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); + } + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on " + dsInfo + ", hold back process with " + state); + } catch (IOException e) { + logger.error("I/O exception " + e.getMessage() + " locking " + dsInfo); + } + } + } else if (state == RequestedState.ARCHIVE_REQUESTED) { + try { + Lock lock = lockManager.lock(dsInfo, LockType.EXCLUSIVE); + it.remove(); + logger.debug("Will process " + dsInfo + " with " + state); + 
dsChanging.put(dsInfo, state); + final Thread w = new Thread( + new DsArchiver(dsInfo, propertyHandler, FiniteStateMachineForStorageUnitDataset.this, lock)); + w.start(); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on " + dsInfo + ", hold back process with " + state); + } catch (IOException e) { + logger.error("I/O exception " + e.getMessage() + " locking " + dsInfo); + } + } else if (state == RequestedState.RESTORE_REQUESTED) { + try { + Lock lock = lockManager.lock(dsInfo, LockType.EXCLUSIVE); + logger.debug("Will process " + dsInfo + " with " + state); + dsChanging.put(dsInfo, state); + it.remove(); + final Thread w = new Thread( + new DsRestorer(dsInfo, propertyHandler, FiniteStateMachineForStorageUnitDataset.this, reader, lock)); + w.start(); + } catch (AlreadyLockedException e) { + logger.debug("Could not acquire lock on " + dsInfo + ", hold back process with " + state); + } catch (IOException e) { + logger.error("I/O exception " + e.getMessage() + " locking " + dsInfo); + } + } + } + } + deferredDsOpsQueue.putAll(newOps); + } + + } finally { + timer.schedule(new DsProcessQueue(), processQueueIntervalMillis); + } + + } + + } + +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 1ec7f472..7cbae32f 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -4,7 +4,6 @@ import java.nio.file.Path; import java.util.HashMap; -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.IcatReader; import org.icatproject.ids.LockManager; import org.icatproject.ids.PropertyHandler; @@ -16,6 +15,7 @@ import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; +import 
org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.handlers.ArchiveHandler; import org.icatproject.ids.v3.handlers.GetDataFileIdsHandler; diff --git a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java index 51d47640..38fe5f29 100644 --- a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java +++ b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java @@ -3,13 +3,13 @@ import java.util.Set; import org.icatproject.ICAT; -import org.icatproject.ids.FiniteStateMachine; import org.icatproject.ids.IcatReader; import org.icatproject.ids.LockManager; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.Transmitter; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; /** diff --git a/src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java b/src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java index 6d863c73..cfa46755 100644 --- a/src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java +++ b/src/main/java/org/icatproject/ids/v3/UnfinishedWorkServiceBase.java @@ -10,10 +10,10 @@ import org.icatproject.Dataset; import org.icatproject.IcatExceptionType; import org.icatproject.IcatException_Exception; -import org.icatproject.ids.DeferredOp; import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.InsufficientPrivilegesException; import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataSetInfo; import org.icatproject.utils.IcatSecurity; diff --git a/src/main/java/org/icatproject/ids/DeferredOp.java 
b/src/main/java/org/icatproject/ids/v3/enums/DeferredOp.java similarity index 87% rename from src/main/java/org/icatproject/ids/DeferredOp.java rename to src/main/java/org/icatproject/ids/v3/enums/DeferredOp.java index 31ea7908..60c955c7 100644 --- a/src/main/java/org/icatproject/ids/DeferredOp.java +++ b/src/main/java/org/icatproject/ids/v3/enums/DeferredOp.java @@ -1,4 +1,4 @@ -package org.icatproject.ids; +package org.icatproject.ids.v3.enums; /* * Represents the type of action that was requested by a user. diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index 3289954b..5d89e9da 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -35,7 +35,6 @@ import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.helper.SO; import org.icatproject.ids.v3.models.DataFileInfo; -import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; import org.icatproject.ids.v3.models.ValueContainer; From 2d92b52fde3119e7f50487fb0ea500e4a8b098eb Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Mon, 26 Feb 2024 15:44:23 +0100 Subject: [PATCH 18/92] FiniteStateMachine base class only uses DataInfoBase now --- .../org/icatproject/ids/DataSelection.java | 4 +- .../java/org/icatproject/ids/IdsBean.java | 16 +- .../icatproject/ids/thread/DfArchiver.java | 8 +- .../org/icatproject/ids/thread/DfDeleter.java | 8 +- .../icatproject/ids/thread/DfRestorer.java | 23 ++- .../org/icatproject/ids/thread/DfWriter.java | 22 +-- .../icatproject/ids/thread/DsArchiver.java | 6 +- .../icatproject/ids/thread/DsRestorer.java | 6 +- .../org/icatproject/ids/thread/DsWriter.java | 6 +- .../DataSelectionForStorageUnitDatafile.java | 2 +- .../DataSelectionForStorageUnitDataset.java | 2 +- .../FiniteStateMachine.java | 142 +++++------------- 
...niteStateMachineForSingleLevelStorage.java | 8 + ...iteStateMachineForStorageUnitDatafile.java | 78 +++++----- ...niteStateMachineForStorageUnitDataset.java | 73 +++++---- 15 files changed, 187 insertions(+), 217 deletions(-) diff --git a/src/main/java/org/icatproject/ids/DataSelection.java b/src/main/java/org/icatproject/ids/DataSelection.java index 29c9f93e..9860ad75 100644 --- a/src/main/java/org/icatproject/ids/DataSelection.java +++ b/src/main/java/org/icatproject/ids/DataSelection.java @@ -358,7 +358,7 @@ public void checkOnline() public static boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { boolean maybeOffline = false; var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDfMaybeOffline().contains(dfInfo)) { + if (serviceProvider.getFsm().getMaybeOffline().contains(dfInfo)) { maybeOffline = true; } else if (!serviceProvider.getMainStorage().exists(dfInfo.getDfLocation())) { serviceProvider.getFsm().queue(dfInfo, DeferredOp.RESTORE); @@ -371,7 +371,7 @@ public static boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalExcep public static boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { boolean maybeOffline = false; var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDsMaybeOffline().contains(dsInfo)) { + if (serviceProvider.getFsm().getMaybeOffline().contains(dsInfo)) { maybeOffline = true; } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists(dsInfo)) { serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 2361597c..d8de0225 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -923,8 +923,8 @@ public String getStatus(String preparedId, String ip) Status status = Status.ONLINE; if (storageUnit 
== StorageUnit.DATASET) { - Set restoring = fsm.getDsRestoring(); - Set maybeOffline = fsm.getDsMaybeOffline(); + Set restoring = fsm.getRestoring(); + Set maybeOffline = fsm.getMaybeOffline(); for (DataSetInfo dsInfo : dsInfos.values()) { fsm.checkFailure(dsInfo.getId()); if (restoring.contains(dsInfo)) { @@ -938,8 +938,8 @@ public String getStatus(String preparedId, String ip) } } } else if (storageUnit == StorageUnit.DATAFILE) { - Set restoring = fsm.getDfRestoring(); - Set maybeOffline = fsm.getDfMaybeOffline(); + Set restoring = fsm.getRestoring(); + Set maybeOffline = fsm.getMaybeOffline(); for (DataFileInfo dfInfo : dfInfos) { fsm.checkFailure(dfInfo.getId()); if (restoring.contains(dfInfo)) { @@ -998,8 +998,8 @@ public String getStatus(String sessionId, String investigationIds, String datase investigationIds, datasetIds, datafileIds, Returns.DATASETS); Map dsInfos = dataSelection.getDsInfo(); - Set restoring = fsm.getDsRestoring(); - Set maybeOffline = fsm.getDsMaybeOffline(); + Set restoring = fsm.getRestoring(); + Set maybeOffline = fsm.getMaybeOffline(); Set emptyDatasets = dataSelection.getEmptyDatasets(); for (DataSetInfo dsInfo : dsInfos.values()) { fsm.checkFailure(dsInfo.getId()); @@ -1018,8 +1018,8 @@ public String getStatus(String sessionId, String investigationIds, String datase investigationIds, datasetIds, datafileIds, Returns.DATAFILES); Set dfInfos = dataSelection.getDfInfo(); - Set restoring = fsm.getDfRestoring(); - Set maybeOffline = fsm.getDfMaybeOffline(); + Set restoring = fsm.getRestoring(); + Set maybeOffline = fsm.getMaybeOffline(); for (DataFileInfo dfInfo : dfInfos) { fsm.checkFailure(dfInfo.getId()); if (restoring.contains(dfInfo)) { diff --git a/src/main/java/org/icatproject/ids/thread/DfArchiver.java b/src/main/java/org/icatproject/ids/thread/DfArchiver.java index 2cce1919..76b64485 100644 --- a/src/main/java/org/icatproject/ids/thread/DfArchiver.java +++ b/src/main/java/org/icatproject/ids/thread/DfArchiver.java @@ -9,9 +9,9 @@ 
import org.slf4j.LoggerFactory; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; -import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.models.DataFileInfo; /* * Removes datafiles from the fast storage (doesn't write them to archive storage) @@ -21,11 +21,11 @@ public class DfArchiver implements Runnable { private MainStorageInterface mainStorageInterface; private FiniteStateMachine fsm; - private List dfInfos; + private List dfInfos; private Path markerDir; private Collection locks; - public DfArchiver(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { + public DfArchiver(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { this.dfInfos = dfInfos; this.fsm = fsm; this.locks = locks; @@ -36,7 +36,7 @@ public DfArchiver(List dfInfos, PropertyHandler propertyHandler, FiniteS @Override public void run() { try { - for (DfInfo dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { try { if (Files.exists(markerDir.resolve(Long.toString(dfInfo.getDfId())))) { logger.error("Archive of " + dfInfo diff --git a/src/main/java/org/icatproject/ids/thread/DfDeleter.java b/src/main/java/org/icatproject/ids/thread/DfDeleter.java index 2a00c429..43bf3405 100644 --- a/src/main/java/org/icatproject/ids/thread/DfDeleter.java +++ b/src/main/java/org/icatproject/ids/thread/DfDeleter.java @@ -8,8 +8,8 @@ import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.models.DataFileInfo; /** * Delete datafiles from archive @@ -20,10 +20,10 @@ public class DfDeleter implements Runnable { private 
FiniteStateMachine fsm; private ArchiveStorageInterface archiveStorageInterface; - private List dfInfos; + private List dfInfos; private Collection locks; - public DfDeleter(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { + public DfDeleter(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { this.dfInfos = dfInfos; this.fsm = fsm; this.locks = locks; @@ -33,7 +33,7 @@ public DfDeleter(List dfInfos, PropertyHandler propertyHandler, FiniteSt @Override public void run() { try { - for (DfInfo dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dfInfos) { try { String dfLocation = dfInfo.getDfLocation(); archiveStorageInterface.delete(dfLocation); diff --git a/src/main/java/org/icatproject/ids/thread/DfRestorer.java b/src/main/java/org/icatproject/ids/thread/DfRestorer.java index 6cab48d8..e30c39d5 100644 --- a/src/main/java/org/icatproject/ids/thread/DfRestorer.java +++ b/src/main/java/org/icatproject/ids/thread/DfRestorer.java @@ -1,5 +1,6 @@ package org.icatproject.ids.thread; +import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -13,6 +14,7 @@ import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.models.DataFileInfo; /* * Restores datafiles from the slow to the fast storage. 
@@ -24,11 +26,11 @@ public class DfRestorer implements Runnable { private MainStorageInterface mainStorageInterface; private ArchiveStorageInterface archiveStorageInterface; private FiniteStateMachine fsm; - private List dfInfos; + private List dataFileInfos; private Collection locks; - public DfRestorer(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { - this.dfInfos = dfInfos; + public DfRestorer(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { + this.dataFileInfos = dfInfos; this.fsm = fsm; this.locks = locks; @@ -50,17 +52,24 @@ public void run() { * generally remove anything from the list of files to restore as * pointless restores are normally filtered out earlier. */ - Iterator iter = dfInfos.iterator(); + Iterator iter = dataFileInfos.iterator(); while (iter.hasNext()) { - DfInfo dfInfo = iter.next(); + DataFileInfo dfInfo = iter.next(); if (mainStorageInterface.exists(dfInfo.getDfLocation())) { iter.remove(); fsm.removeFromChanging(dfInfo); } } + //TODO: This is additional conversion caused by the redesign :-( + List dfInfos = new ArrayList<>(); + for(DfInfo dfInfo : this.dataFileInfos) { + dfInfos.add(dfInfo); + } + + Set failures = archiveStorageInterface.restore(mainStorageInterface, dfInfos); - for (DfInfo dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dataFileInfos) { if (failures.contains(dfInfo)) { fsm.recordFailure(dfInfo.getDfId()); logger.error("Restore of " + dfInfo + " failed"); @@ -71,7 +80,7 @@ public void run() { fsm.removeFromChanging(dfInfo); } } catch (Exception e) { - for (DfInfo dfInfo : dfInfos) { + for (DataFileInfo dfInfo : dataFileInfos) { logger.error("Restore of " + dfInfo + " failed " + e.getClass() + " " + e.getMessage()); fsm.removeFromChanging(dfInfo); } diff --git a/src/main/java/org/icatproject/ids/thread/DfWriter.java b/src/main/java/org/icatproject/ids/thread/DfWriter.java index 1ed1373b..ebfedaaa 100644 --- 
a/src/main/java/org/icatproject/ids/thread/DfWriter.java +++ b/src/main/java/org/icatproject/ids/thread/DfWriter.java @@ -11,9 +11,9 @@ import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.plugin.DfInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.models.DataFileInfo; /** * Copies datafiles from main to archive @@ -26,11 +26,11 @@ public class DfWriter implements Runnable { private MainStorageInterface mainStorageInterface; private ArchiveStorageInterface archiveStorageInterface; private Path markerDir; - private List dfInfos; + private List dataFileInfos; private Collection locks; - public DfWriter(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { - this.dfInfos = dfInfos; + public DfWriter(List dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection locks) { + this.dataFileInfos = dfInfos; this.fsm = fsm; this.locks = locks; mainStorageInterface = propertyHandler.getMainStorage(); @@ -41,18 +41,18 @@ public DfWriter(List dfInfos, PropertyHandler propertyHandler, FiniteSta @Override public void run() { try { - for (DfInfo dfInfo : dfInfos) { - String dfLocation = dfInfo.getDfLocation(); - try (InputStream is = mainStorageInterface.get(dfLocation, dfInfo.getCreateId(), dfInfo.getModId())) { + for (DataFileInfo dataFileInfo : dataFileInfos) { + String dfLocation = dataFileInfo.getDfLocation(); + try (InputStream is = mainStorageInterface.get(dfLocation, dataFileInfo.getCreateId(), dataFileInfo.getModId())) { archiveStorageInterface.put(is, dfLocation); - Path marker = markerDir.resolve(Long.toString(dfInfo.getDfId())); + Path marker = markerDir.resolve(Long.toString(dataFileInfo.getDfId())); Files.deleteIfExists(marker); logger.debug("Removed marker " + marker); - 
logger.debug("Write of " + dfInfo + " completed"); + logger.debug("Write of " + dataFileInfo + " completed"); } catch (Exception e) { - logger.error("Write of " + dfInfo + " failed due to " + e.getClass() + " " + e.getMessage()); + logger.error("Write of " + dataFileInfo + " failed due to " + e.getClass() + " " + e.getMessage()); } finally { - fsm.removeFromChanging(dfInfo); + fsm.removeFromChanging(dataFileInfo); } } } finally { diff --git a/src/main/java/org/icatproject/ids/thread/DsArchiver.java b/src/main/java/org/icatproject/ids/thread/DsArchiver.java index d2182753..8ff993b1 100644 --- a/src/main/java/org/icatproject/ids/thread/DsArchiver.java +++ b/src/main/java/org/icatproject/ids/thread/DsArchiver.java @@ -7,23 +7,23 @@ import org.slf4j.LoggerFactory; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; -import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.models.DataSetInfo; /* * Removes datasets from the fast storage (doesn't write them to slow storage) */ public class DsArchiver implements Runnable { private final static Logger logger = LoggerFactory.getLogger(DsArchiver.class); - private DsInfo dsInfo; + private DataSetInfo dsInfo; private MainStorageInterface mainStorageInterface; private FiniteStateMachine fsm; private Path markerDir; private Lock lock; - public DsArchiver(DsInfo dsInfo, PropertyHandler propertyHandler, FiniteStateMachine fsm, Lock lock) { + public DsArchiver(DataSetInfo dsInfo, PropertyHandler propertyHandler, FiniteStateMachine fsm, Lock lock) { this.dsInfo = dsInfo; this.fsm = fsm; mainStorageInterface = propertyHandler.getMainStorage(); diff --git a/src/main/java/org/icatproject/ids/thread/DsRestorer.java b/src/main/java/org/icatproject/ids/thread/DsRestorer.java index c68fedab..194ecb64 100644 --- 
a/src/main/java/org/icatproject/ids/thread/DsRestorer.java +++ b/src/main/java/org/icatproject/ids/thread/DsRestorer.java @@ -21,10 +21,10 @@ import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.models.DataSetInfo; /* * Restores datafiles from the slow to the fast storage. @@ -33,7 +33,7 @@ public class DsRestorer implements Runnable { private final static Logger logger = LoggerFactory.getLogger(DsRestorer.class); - private DsInfo dsInfo; + private DataSetInfo dsInfo; private MainStorageInterface mainStorageInterface; private ArchiveStorageInterface archiveStorageInterface; @@ -46,7 +46,7 @@ public class DsRestorer implements Runnable { private ZipMapperInterface zipMapper; private Lock lock; - public DsRestorer(DsInfo dsInfo, PropertyHandler propertyHandler, FiniteStateMachine fsm, IcatReader reader, Lock lock) { + public DsRestorer(DataSetInfo dsInfo, PropertyHandler propertyHandler, FiniteStateMachine fsm, IcatReader reader, Lock lock) { this.dsInfo = dsInfo; this.fsm = fsm; zipMapper = propertyHandler.getZipMapper(); diff --git a/src/main/java/org/icatproject/ids/thread/DsWriter.java b/src/main/java/org/icatproject/ids/thread/DsWriter.java index d69d9d5d..62f81048 100644 --- a/src/main/java/org/icatproject/ids/thread/DsWriter.java +++ b/src/main/java/org/icatproject/ids/thread/DsWriter.java @@ -20,10 +20,10 @@ import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.plugin.ZipMapperInterface; import 
org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; +import org.icatproject.ids.v3.models.DataSetInfo; /** * Copies dataset from main to archive @@ -32,7 +32,7 @@ public class DsWriter implements Runnable { private final static Logger logger = LoggerFactory.getLogger(DsWriter.class); private static final int BUFSIZ = 1024; - private DsInfo dsInfo; + private DataSetInfo dsInfo; private FiniteStateMachine fsm; private MainStorageInterface mainStorageInterface; @@ -43,7 +43,7 @@ public class DsWriter implements Runnable { private ZipMapperInterface zipMapper; private Lock lock; - public DsWriter(DsInfo dsInfo, PropertyHandler propertyHandler, FiniteStateMachine fsm, IcatReader reader, Lock lock) { + public DsWriter(DataSetInfo dsInfo, PropertyHandler propertyHandler, FiniteStateMachine fsm, IcatReader reader, Lock lock) { this.dsInfo = dsInfo; this.fsm = fsm; this.zipMapper = propertyHandler.getZipMapper(); diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java index e86da9cd..942d38d7 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -38,7 +38,7 @@ public void checkOnline()throws InternalException, DataNotOnlineException { public boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { boolean maybeOffline = false; var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDfMaybeOffline().contains(dfInfo)) { + if (serviceProvider.getFsm().getMaybeOffline().contains(dfInfo)) { maybeOffline = true; } else if (!serviceProvider.getMainStorage().exists(dfInfo.getDfLocation())) { serviceProvider.getFsm().queue(dfInfo, DeferredOp.RESTORE); diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java 
b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java index 8eb7b57a..00c5f6e3 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -39,7 +39,7 @@ public void checkOnline() throws InternalException, DataNotOnlineException { public boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { boolean maybeOffline = false; var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getDsMaybeOffline().contains(dsInfo)) { + if (serviceProvider.getFsm().getMaybeOffline().contains(dsInfo)) { maybeOffline = true; } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists(dsInfo)) { serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java index 08983488..ad5d2eaa 100644 --- a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java +++ b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachine.java @@ -25,12 +25,8 @@ import org.icatproject.ids.StorageUnit; import org.icatproject.ids.LockManager.LockInfo; import org.icatproject.ids.exceptions.InternalException; -import org.icatproject.ids.plugin.DfInfo; -import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.v3.enums.DeferredOp; -import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataInfoBase; -import org.icatproject.ids.v3.models.DataSetInfo; public abstract class FiniteStateMachine { @@ -65,6 +61,10 @@ public static FiniteStateMachine getInstance() { throw new RuntimeException("Instance of FiniteStateMachine is not created. 
At First createInstance() has to be called at least once."); } + public abstract void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException; + protected abstract void scheduleTimer(); + protected abstract void addDataInfoJson(JsonGenerator gen); + public enum RequestedState { ARCHIVE_REQUESTED, DELETE_REQUESTED, RESTORE_REQUESTED, WRITE_REQUESTED, WRITE_THEN_ARCHIVE_REQUESTED } @@ -77,13 +77,9 @@ public enum RequestedState { */ protected long processOpsDelayMillis; - protected Map deferredDfOpsQueue = new HashMap<>(); - - protected Map deferredDsOpsQueue = new HashMap<>(); - - protected Map dfChanging = new HashMap<>(); + protected Map deferredOpsQueue = new HashMap<>(); - protected Map dsChanging = new HashMap<>(); + protected Map dataInfoChanging = new HashMap<>(); protected Path markerDir; protected long processQueueIntervalMillis; @@ -92,17 +88,12 @@ public enum RequestedState { protected IcatReader reader; - protected LockManager lockManager; - protected StorageUnit storageUnit; - protected Timer timer = new Timer("FSM Timer"); protected Long processOpsTime; - protected Map writeTimes = new HashMap<>(); - protected Set failures = ConcurrentHashMap.newKeySet(); public void exit() { @@ -113,14 +104,14 @@ public void exit() { /** * Find any DataFileInfo which may be offline */ - public Set getDfMaybeOffline() { - Map union; - synchronized (deferredDfOpsQueue) { - union = new HashMap<>(dfChanging); - union.putAll(deferredDfOpsQueue); - } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { + public Set getMaybeOffline() { + Map union; + synchronized (deferredOpsQueue) { + union = new HashMap<>(dataInfoChanging); + union.putAll(deferredOpsQueue); + } + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { if (entry.getValue() != RequestedState.WRITE_REQUESTED) { result.add(entry.getKey()); } @@ -131,14 +122,14 @@ public Set getDfMaybeOffline() { /** * Find any DataFileInfo which are being restored or are 
queued for restoration */ - public Set getDfRestoring() { - Map union; - synchronized (deferredDfOpsQueue) { - union = new HashMap<>(dfChanging); - union.putAll(deferredDfOpsQueue); - } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { + public Set getRestoring() { + Map union; + synchronized (deferredOpsQueue) { + union = new HashMap<>(dataInfoChanging); + union.putAll(deferredOpsQueue); + } + Set result = new HashSet<>(); + for (Entry entry : union.entrySet()) { if (entry.getValue() == RequestedState.RESTORE_REQUESTED) { result.add(entry.getKey()); } @@ -146,74 +137,26 @@ public Set getDfRestoring() { return result; } - /** - * Find any DataSetInfo which may be offline - */ - public Set getDsMaybeOffline() { - Map union; - synchronized (deferredDsOpsQueue) { - union = new HashMap<>(dsChanging); - union.putAll(deferredDsOpsQueue); - } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { - if (entry.getValue() != RequestedState.WRITE_REQUESTED) { - result.add(entry.getKey()); - } - } - return result; - } - - /** - * Find any DsInfo which are being restored or are queued for restoration - */ - public Set getDsRestoring() { - Map union; - synchronized (deferredDsOpsQueue) { - union = new HashMap<>(dsChanging); - union.putAll(deferredDsOpsQueue); + protected void addDataInfoJsonFromDeferredOpsQueue(JsonGenerator gen) { + Map union; + synchronized (deferredOpsQueue) { + union = new HashMap<>(dataInfoChanging); + union.putAll(deferredOpsQueue); } - Set result = new HashSet<>(); - for (Entry entry : union.entrySet()) { - if (entry.getValue() == RequestedState.RESTORE_REQUESTED) { - result.add(entry.getKey()); - } + gen.writeStartArray("opsQueue"); + for (Entry entry : union.entrySet()) { + DataInfoBase item = entry.getKey(); + gen.writeStartObject().write("data", item.toString()).write("request", entry.getValue().name()) + .writeEnd(); } - return result; + gen.writeEnd(); // end Array("opsQueue") } public String 
getServiceStatus() throws InternalException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - if (storageUnit == null) { - gen.writeStartArray("opsQueue").writeEnd(); - } else if (storageUnit == StorageUnit.DATASET) { - Map union; - synchronized (deferredDsOpsQueue) { - union = new HashMap<>(dsChanging); - union.putAll(deferredDsOpsQueue); - } - gen.writeStartArray("opsQueue"); - for (Entry entry : union.entrySet()) { - DsInfo item = entry.getKey(); - gen.writeStartObject().write("data", item.toString()).write("request", entry.getValue().name()) - .writeEnd(); - } - gen.writeEnd(); // end Array("opsQueue") - } else if (storageUnit == StorageUnit.DATAFILE) { - Map union; - synchronized (deferredDfOpsQueue) { - union = new HashMap<>(dfChanging); - union.putAll(deferredDfOpsQueue); - } - gen.writeStartArray("opsQueue"); - for (Entry entry : union.entrySet()) { - DfInfo item = entry.getKey(); - gen.writeStartObject().write("data", item.toString()).write("request", entry.getValue().name()) - .writeEnd(); - } - gen.writeEnd(); // end Array("opsQueue") - } + + this.addDataInfoJson(gen); Collection lockInfo = lockManager.getLockInfo(); gen.write("lockCount", lockInfo.size()); @@ -234,8 +177,6 @@ public String getServiceStatus() throws InternalException { return baos.toString(); } - public abstract void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException; - protected abstract void scheduleTimer(); public void init() { @@ -244,7 +185,6 @@ public void init() { if(!inited) { propertyHandler = PropertyHandler.getInstance(); processQueueIntervalMillis = propertyHandler.getProcessQueueIntervalSeconds() * 1000L; - storageUnit = propertyHandler.getStorageUnit(); this.scheduleTimer(); @@ -259,15 +199,9 @@ public void init() { } - public void removeFromChanging(DfInfo dfInfo) { - synchronized (deferredDfOpsQueue) { - dfChanging.remove(dfInfo); - } - } - - public void 
removeFromChanging(DsInfo dsInfo) { - synchronized (deferredDsOpsQueue) { - dsChanging.remove(dsInfo); + public void removeFromChanging(DataInfoBase dfInfo) { + synchronized (deferredOpsQueue) { + dataInfoChanging.remove(dfInfo); } } diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java index 479b5aa3..b47f724c 100644 --- a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java +++ b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForSingleLevelStorage.java @@ -6,6 +6,8 @@ import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.models.DataInfoBase; +import jakarta.json.stream.JsonGenerator; + public class FiniteStateMachineForSingleLevelStorage extends FiniteStateMachine { protected FiniteStateMachineForSingleLevelStorage(IcatReader reader, LockManager lockManager) { @@ -24,4 +26,10 @@ protected void scheduleTimer() { //nothing to do here for single level storage } + + @Override + protected void addDataInfoJson(JsonGenerator gen) { + gen.writeStartArray("opsQueue").writeEnd(); + } + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java index 754bdb3d..32bf0c19 100644 --- a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java +++ b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDatafile.java @@ -21,7 +21,6 @@ import org.icatproject.ids.LockManager.LockType; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.AlreadyLockedException; -import org.icatproject.ids.plugin.DfInfo; import 
org.icatproject.ids.thread.DfArchiver; import org.icatproject.ids.thread.DfDeleter; import org.icatproject.ids.thread.DfRestorer; @@ -31,6 +30,8 @@ import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; +import jakarta.json.stream.JsonGenerator; + public class FiniteStateMachineForStorageUnitDatafile extends FiniteStateMachine { protected FiniteStateMachineForStorageUnitDatafile(IcatReader icatReader, LockManager lockManager) { @@ -46,6 +47,12 @@ protected void scheduleTimer() { } + @Override + protected void addDataInfoJson(JsonGenerator gen) { + this.addDataInfoJsonFromDeferredOpsQueue(gen); + } + + public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException { var dfInfo = (DataFileInfo) dataInfo; @@ -53,7 +60,7 @@ public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalE logger.info("Requesting " + deferredOp + " of datafile " + dfInfo); - synchronized (deferredDfOpsQueue) { + synchronized (deferredOpsQueue) { if (processOpsTime == null) { processOpsTime = System.currentTimeMillis() + processOpsDelayMillis; @@ -61,7 +68,7 @@ public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalE logger.debug("Requesting delay operations till " + d); } - final RequestedState state = this.deferredDfOpsQueue.get(dfInfo); + final RequestedState state = this.deferredOpsQueue.get(dfInfo); if (state == null) { if (deferredOp == DeferredOp.WRITE) { try { @@ -73,39 +80,39 @@ public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalE } catch (IOException e) { throw new InternalException(e.getClass() + " " + e.getMessage()); } - deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); } else if 
(deferredOp == DeferredOp.RESTORE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.RESTORE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.RESTORE_REQUESTED); } else if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); } } else if (state == RequestedState.ARCHIVE_REQUESTED) { if (deferredOp == DeferredOp.RESTORE) { - deferredDfOpsQueue.remove(dfInfo); + deferredOpsQueue.remove(dfInfo); } else if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); } } else if (state == RequestedState.DELETE_REQUESTED) { // No way out } else if (state == RequestedState.RESTORE_REQUESTED) { if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.DELETE_REQUESTED); } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); } } else if (state == RequestedState.WRITE_REQUESTED) { if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.remove(dfInfo); + deferredOpsQueue.remove(dfInfo); } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); } } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { if (deferredOp == DeferredOp.DELETE) { - deferredDfOpsQueue.remove(dfInfo); + deferredOpsQueue.remove(dfInfo); } else if (deferredOp == DeferredOp.RESTORE) { - deferredDfOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); + deferredOpsQueue.put(dfInfo, RequestedState.WRITE_REQUESTED); } } } @@ -117,24 +124,27 @@ private class DfProcessQueue extends TimerTask { @Override public 
void run() { try { - synchronized (deferredDfOpsQueue) { - if (processOpsTime != null && System.currentTimeMillis() > processOpsTime && !deferredDfOpsQueue.isEmpty()) { + synchronized (deferredOpsQueue) { + if (processOpsTime != null && System.currentTimeMillis() > processOpsTime && !deferredOpsQueue.isEmpty()) { processOpsTime = null; - logger.debug("deferredDfOpsQueue has " + deferredDfOpsQueue.size() + " entries"); - List writes = new ArrayList<>(); - List archives = new ArrayList<>(); - List restores = new ArrayList<>(); - List deletes = new ArrayList<>(); + logger.debug("deferredDfOpsQueue has " + deferredOpsQueue.size() + " entries"); + List writes = new ArrayList<>(); + List archives = new ArrayList<>(); + List restores = new ArrayList<>(); + List deletes = new ArrayList<>(); Map writeLocks = new HashMap<>(); Map archiveLocks = new HashMap<>(); Map restoreLocks = new HashMap<>(); Map deleteLocks = new HashMap<>(); - Map newOps = new HashMap<>(); - final Iterator> it = deferredDfOpsQueue.entrySet().iterator(); + Map newOps = new HashMap<>(); + final Iterator> it = deferredOpsQueue.entrySet().iterator(); while (it.hasNext()) { - Entry opEntry = it.next(); - DataFileInfo dfInfo = opEntry.getKey(); + Entry opEntry = it.next(); + var dfInfo = (DataFileInfo) opEntry.getKey(); + + if(dfInfo == null) throw new RuntimeException("Could not cast DataInfoBase to DataFileInfo. 
Did you handed over another sub type?"); + Long dsId = dfInfo.getDsId(); DataSetInfo dsInfo; try { @@ -144,7 +154,7 @@ public void run() { logger.error("Could not get dsInfo {}: {}.", dsId, e.getMessage()); continue; } - if (!dfChanging.containsKey(dfInfo)) { + if (!dataInfoChanging.containsKey(dfInfo)) { final RequestedState state = opEntry.getValue(); logger.debug(dfInfo + " " + state); if (state == RequestedState.WRITE_REQUESTED) { @@ -160,7 +170,7 @@ public void run() { } } it.remove(); - dfChanging.put(dfInfo, state); + dataInfoChanging.put(dfInfo, state); writes.add(dfInfo); } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { if (!writeLocks.containsKey(dsId)) { @@ -175,7 +185,7 @@ public void run() { } } it.remove(); - dfChanging.put(dfInfo, RequestedState.WRITE_REQUESTED); + dataInfoChanging.put(dfInfo, RequestedState.WRITE_REQUESTED); writes.add(dfInfo); newOps.put(dfInfo, RequestedState.ARCHIVE_REQUESTED); } else if (state == RequestedState.ARCHIVE_REQUESTED) { @@ -191,7 +201,7 @@ public void run() { } } it.remove(); - dfChanging.put(dfInfo, state); + dataInfoChanging.put(dfInfo, state); archives.add(dfInfo); } else if (state == RequestedState.RESTORE_REQUESTED) { if (!restoreLocks.containsKey(dsId)) { @@ -206,7 +216,7 @@ public void run() { } } it.remove(); - dfChanging.put(dfInfo, state); + dataInfoChanging.put(dfInfo, state); restores.add(dfInfo); } else if (state == RequestedState.DELETE_REQUESTED) { if (!deleteLocks.containsKey(dsId)) { @@ -221,7 +231,7 @@ public void run() { } } it.remove(); - dfChanging.put(dfInfo, state); + dataInfoChanging.put(dfInfo, state); deletes.add(dfInfo); } else { throw new AssertionError("Impossible state"); @@ -229,10 +239,10 @@ public void run() { } } if (!newOps.isEmpty()) { - deferredDfOpsQueue.putAll(newOps); + deferredOpsQueue.putAll(newOps); logger.debug("Adding {} operations to be scheduled next time round", newOps.size()); } - if (!deferredDfOpsQueue.isEmpty()) { + if (!deferredOpsQueue.isEmpty()) 
{ processOpsTime = 0L; } if (!writes.isEmpty()) { diff --git a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java index b333a2ab..bc630190 100644 --- a/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/FiniteStateMachine/FiniteStateMachineForStorageUnitDataset.java @@ -17,7 +17,6 @@ import org.icatproject.ids.LockManager.LockType; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.AlreadyLockedException; -import org.icatproject.ids.plugin.DsInfo; import org.icatproject.ids.thread.DsArchiver; import org.icatproject.ids.thread.DsRestorer; import org.icatproject.ids.thread.DsWriter; @@ -25,8 +24,12 @@ import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; +import jakarta.json.stream.JsonGenerator; + public class FiniteStateMachineForStorageUnitDataset extends FiniteStateMachine { + protected Map writeTimes = new HashMap<>(); + protected FiniteStateMachineForStorageUnitDataset(IcatReader reader, LockManager lockManager) { super(reader, lockManager); } @@ -39,50 +42,52 @@ protected void scheduleTimer() { logger.info("DsProcessQueue scheduled to run in " + processQueueIntervalMillis + " milliseconds"); } + @Override + protected void addDataInfoJson(JsonGenerator gen) { + this.addDataInfoJsonFromDeferredOpsQueue(gen); + } - public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException { - var dsInfo = (DataSetInfo) dataInfo; - if(dsInfo == null) throw new InternalException("DataInfoBase object could not be casted into a DataSetInfo. 
Did you handed over a DataFileInfo instead?"); + public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalException { - logger.info("Requesting " + deferredOp + " of dataset " + dsInfo); + logger.info("Requesting " + deferredOp + " of dataset " + dataInfo); - synchronized (deferredDsOpsQueue) { + synchronized (deferredOpsQueue) { - final RequestedState state = this.deferredDsOpsQueue.get(dsInfo); + final RequestedState state = this.deferredOpsQueue.get(dataInfo); if (state == null) { if (deferredOp == DeferredOp.WRITE) { - requestWrite(dsInfo); + requestWrite(dataInfo); } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); + deferredOpsQueue.put(dataInfo, RequestedState.ARCHIVE_REQUESTED); } else if (deferredOp == DeferredOp.RESTORE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.RESTORE_REQUESTED); + deferredOpsQueue.put(dataInfo, RequestedState.RESTORE_REQUESTED); } } else if (state == RequestedState.ARCHIVE_REQUESTED) { if (deferredOp == DeferredOp.WRITE) { - requestWrite(dsInfo); - deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); + requestWrite(dataInfo); + deferredOpsQueue.put(dataInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); } else if (deferredOp == DeferredOp.RESTORE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.RESTORE_REQUESTED); + deferredOpsQueue.put(dataInfo, RequestedState.RESTORE_REQUESTED); } } else if (state == RequestedState.RESTORE_REQUESTED) { if (deferredOp == DeferredOp.WRITE) { - requestWrite(dsInfo); + requestWrite(dataInfo); } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.ARCHIVE_REQUESTED); + deferredOpsQueue.put(dataInfo, RequestedState.ARCHIVE_REQUESTED); } } else if (state == RequestedState.WRITE_REQUESTED) { if (deferredOp == DeferredOp.WRITE) { - setDelay(dsInfo); + setDelay(dataInfo); } else if (deferredOp == DeferredOp.ARCHIVE) { - deferredDsOpsQueue.put(dsInfo, 
RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); + deferredOpsQueue.put(dataInfo, RequestedState.WRITE_THEN_ARCHIVE_REQUESTED); } } else if (state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { if (deferredOp == DeferredOp.WRITE) { - setDelay(dsInfo); + setDelay(dataInfo); } else if (deferredOp == DeferredOp.RESTORE) { - deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_REQUESTED); + deferredOpsQueue.put(dataInfo, RequestedState.WRITE_REQUESTED); } } } @@ -90,7 +95,7 @@ public void queue(DataInfoBase dataInfo, DeferredOp deferredOp) throws InternalE } - private void requestWrite(DataSetInfo dsInfo) throws InternalException { + private void requestWrite(DataInfoBase dsInfo) throws InternalException { try { Path marker = markerDir.resolve(Long.toString(dsInfo.getId())); Files.createFile(marker); @@ -100,12 +105,12 @@ private void requestWrite(DataSetInfo dsInfo) throws InternalException { } catch (IOException e) { throw new InternalException(e.getClass() + " " + e.getMessage()); } - deferredDsOpsQueue.put(dsInfo, RequestedState.WRITE_REQUESTED); + deferredOpsQueue.put(dsInfo, RequestedState.WRITE_REQUESTED); setDelay(dsInfo); } - private void setDelay(DsInfo dsInfo) { + private void setDelay(DataInfoBase dsInfo) { writeTimes.put(dsInfo, System.currentTimeMillis() + processOpsDelayMillis); if (logger.isDebugEnabled()) { final Date d = new Date(writeTimes.get(dsInfo)); @@ -119,14 +124,18 @@ private class DsProcessQueue extends TimerTask { @Override public void run() { try { - synchronized (deferredDsOpsQueue) { + synchronized (deferredOpsQueue) { final long now = System.currentTimeMillis(); - Map newOps = new HashMap<>(); - final Iterator> it = deferredDsOpsQueue.entrySet().iterator(); + Map newOps = new HashMap<>(); + final Iterator> it = deferredOpsQueue.entrySet().iterator(); while (it.hasNext()) { - final Entry opEntry = it.next(); - final DataSetInfo dsInfo = opEntry.getKey(); - if (!dsChanging.containsKey(dsInfo)) { + final Entry opEntry = it.next(); + final 
var dsInfo = (DataSetInfo) opEntry.getKey(); + + + if(dsInfo == null) throw new RuntimeException("Could not cast DataInfoBase to DataSetInfo. Did you handed over another sub type?"); + + if (!dataInfoChanging.containsKey(dsInfo)) { final RequestedState state = opEntry.getValue(); if (state == RequestedState.WRITE_REQUESTED || state == RequestedState.WRITE_THEN_ARCHIVE_REQUESTED) { @@ -135,7 +144,7 @@ public void run() { Lock lock = lockManager.lock(dsInfo, LockType.SHARED); logger.debug("Will process " + dsInfo + " with " + state); writeTimes.remove(dsInfo); - dsChanging.put(dsInfo, RequestedState.WRITE_REQUESTED); + dataInfoChanging.put(dsInfo, RequestedState.WRITE_REQUESTED); it.remove(); final Thread w = new Thread( new DsWriter(dsInfo, propertyHandler, FiniteStateMachineForStorageUnitDataset.this, reader, lock)); @@ -154,7 +163,7 @@ public void run() { Lock lock = lockManager.lock(dsInfo, LockType.EXCLUSIVE); it.remove(); logger.debug("Will process " + dsInfo + " with " + state); - dsChanging.put(dsInfo, state); + dataInfoChanging.put(dsInfo, state); final Thread w = new Thread( new DsArchiver(dsInfo, propertyHandler, FiniteStateMachineForStorageUnitDataset.this, lock)); w.start(); @@ -167,7 +176,7 @@ public void run() { try { Lock lock = lockManager.lock(dsInfo, LockType.EXCLUSIVE); logger.debug("Will process " + dsInfo + " with " + state); - dsChanging.put(dsInfo, state); + dataInfoChanging.put(dsInfo, state); it.remove(); final Thread w = new Thread( new DsRestorer(dsInfo, propertyHandler, FiniteStateMachineForStorageUnitDataset.this, reader, lock)); @@ -180,7 +189,7 @@ public void run() { } } } - deferredDsOpsQueue.putAll(newOps); + deferredOpsQueue.putAll(newOps); } } finally { From eb02b46b84d548851c23aaeecb3d2995dc2b0f51 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 27 Feb 2024 12:59:56 +0100 Subject: [PATCH 19/92] Added GetStatugHandler --- .../java/org/icatproject/ids/IdsBean.java | 25 ++- .../java/org/icatproject/ids/IdsService.java | 19 
++- .../java/org/icatproject/ids/LockManager.java | 9 +- .../ids/v3/DataSelectionFactory.java | 31 ++-- .../DataSelectionForSingleLevelStorage.java | 22 ++- .../DataSelectionForStorageUnitDatafile.java | 24 ++- .../DataSelectionForStorageUnitDataset.java | 27 ++- .../ids/v3/DataSelectionV3Base.java | 45 ++++- .../org/icatproject/ids/v3/PreparedV3.java | 14 ++ .../ids/v3/RequestHandlerBase.java | 16 +- .../ids/v3/RequestHandlerService.java | 2 + .../icatproject/ids/v3/ServiceProvider.java | 4 +- .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../v3/handlers/GetDataFileIdsHandler.java | 17 +- .../ids/v3/handlers/GetDataHandler.java | 31 ++-- .../ids/v3/handlers/GetSizeHandler.java | 13 +- .../ids/v3/handlers/GetStatusHandler.java | 154 ++++++++++++++++++ .../org/icatproject/ids/v3/helper/SO.java | 21 +-- 18 files changed, 371 insertions(+), 105 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/PreparedV3.java create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index d8de0225..4f9fc245 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -483,7 +483,12 @@ public void delete(String sessionId, String investigationIds, String datasetIds, investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); // Do it - Collection dsInfos = dataSelection.getDsInfo().values(); + Collection dsInfos = new ArrayList(); + + //this is just a workaround during the v3 redesign + for(DataInfoBase dataInfo : dataSelection.getDsInfo().values()) { + dsInfos.add(dataInfo); + } try (Lock lock = lockManager.lock(dsInfos, LockType.EXCLUSIVE)) { if (storageUnit == StorageUnit.DATASET) { @@ -539,7 +544,7 @@ public void delete(String sessionId, String investigationIds, String datasetIds, } if (storageUnit == StorageUnit.DATASET) { - for (DataSetInfo dsInfo : 
dsInfos) { + for (DataInfoBase dsInfo : dsInfos) { fsm.queue(dsInfo, DeferredOp.WRITE); } } @@ -1749,6 +1754,7 @@ public void restore(String sessionId, String investigationIds, String datasetIds } } + @SuppressWarnings("unchecked") public void write(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) throws NotImplementedException, BadRequestException, InsufficientPrivilegesException, InternalException, NotFoundException, DataNotOnlineException { @@ -1769,16 +1775,21 @@ public void write(String sessionId, String investigationIds, String datasetIds, investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); // Do it - Map dsInfos = dataSelection.getDsInfo(); Set dfInfos = dataSelection.getDfInfo(); - try (Lock lock = lockManager.lock(dsInfos.values(), LockType.SHARED)) { + //this is only a workaround during the V3 redesign + var dsInfos = new ArrayList(); + for(DataSetInfo dataSetInfo : dataSelection.getDsInfo().values()) { + dsInfos.add(dataSetInfo); + } + + try (Lock lock = lockManager.lock(dsInfos, LockType.SHARED)) { if (twoLevel) { boolean maybeOffline = false; if (storageUnit == StorageUnit.DATASET) { - for (DataSetInfo dsInfo : dsInfos.values()) { + for (DataInfoBase dsInfo : dsInfos) { if (!dataSelection.getEmptyDatasets().contains(dsInfo.getId()) && - !mainStorage.exists(dsInfo)) { + !mainStorage.exists((DataSetInfo)dsInfo)) { maybeOffline = true; } } @@ -1795,7 +1806,7 @@ public void write(String sessionId, String investigationIds, String datasetIds, } if (storageUnit == StorageUnit.DATASET) { - for (DataSetInfo dsInfo : dsInfos.values()) { + for (DataInfoBase dsInfo : dsInfos) { fsm.queue(dsInfo, DeferredOp.WRITE); } } else if (storageUnit == StorageUnit.DATAFILE) { diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 81e56199..a7c92722 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ 
b/src/main/java/org/icatproject/ids/IdsService.java @@ -368,6 +368,8 @@ public long getSize(@Context HttpServletRequest request, @QueryParam("preparedId * @throws NotFoundException * @throws InsufficientPrivilegesException * @throws InternalException + * @throws NotImplementedException + * @throws DataNotOnlineException * @summary getStatus * @statuscode 200 To indicate success */ @@ -377,12 +379,17 @@ public long getSize(@Context HttpServletRequest request, @QueryParam("preparedId public String getStatus(@Context HttpServletRequest request, @QueryParam("preparedId") String preparedId, @QueryParam("sessionId") String sessionId, @QueryParam("investigationIds") String investigationIds, @QueryParam("datasetIds") String datasetIds, @QueryParam("datafileIds") String datafileIds) - throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { - if (preparedId != null) { - return idsBean.getStatus(preparedId, request.getRemoteAddr()); - } else { - return idsBean.getStatus(sessionId, investigationIds, datasetIds, datafileIds, request.getRemoteAddr()); - } + throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException, DataNotOnlineException, NotImplementedException { + + var parameters = new HashMap(); + parameters.put("preparedId", new ValueContainer(preparedId)); + parameters.put("sessionId", new ValueContainer(sessionId)); + parameters.put("investigationIds", new ValueContainer(investigationIds)); + parameters.put("datasetIds", new ValueContainer(datasetIds)); + parameters.put("datafileIds", new ValueContainer(datafileIds)); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.GETSTATUS, parameters).getString(); } @PostConstruct diff --git a/src/main/java/org/icatproject/ids/LockManager.java b/src/main/java/org/icatproject/ids/LockManager.java index ff168ad4..6b7673df 100644 --- a/src/main/java/org/icatproject/ids/LockManager.java 
+++ b/src/main/java/org/icatproject/ids/LockManager.java @@ -11,9 +11,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; @Singleton @@ -155,10 +156,12 @@ public Lock lock(DataSetInfo ds, LockType type) throws AlreadyLockedException, I } } - public Lock lock(Collection datasets, LockType type) throws AlreadyLockedException, IOException { + public Lock lock(Collection datasets, LockType type) throws AlreadyLockedException, IOException, InternalException { LockCollection locks = new LockCollection(); try { - for (DataSetInfo ds : datasets) { + for (DataInfoBase dataInfo : datasets) { + DataSetInfo ds = (DataSetInfo) dataInfo; + if(ds == null) throw new InternalException("Could not cast " + dataInfo.getClass() + " to DataSetInfo. Did you handed over another sub type of DataInfoBase? 
"); locks.add(lock(ds, type)); } } catch (AlreadyLockedException | IOException e) { diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java index d9ad24eb..513b693f 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -27,6 +27,7 @@ import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -67,11 +68,13 @@ protected static DataSelectionV3Base get(String userSessionId, String investigat - protected static DataSelectionV3Base get(Map dsInfos, Set dfInfos, Set emptyDatasets, RequestType requestType) throws InternalException { + protected static DataSelectionV3Base get(Map dsInfos, Map dfInfos, Set emptyDatasets, RequestType requestType) throws InternalException { List dsids = new ArrayList(dsInfos.keySet()); List dfids = new ArrayList(); - for(DataFileInfo dfInfo: dfInfos) { + var dataFileInfos = new HashMap(); + for(DataInfoBase dfInfo: dfInfos.values()) { dfids.add(dfInfo.getId()); + dataFileInfos.put(dfInfo.getId(), dfInfo); } return DataSelectionFactory.getInstance().createSelection(dsInfos, dfInfos, emptyDatasets, new ArrayList(), dsids, dfids, requestType); } @@ -90,7 +93,7 @@ private DataSelectionFactory() throws InternalException logger.info("### Constructing finished"); } - private DataSelectionV3Base createSelection(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) throws InternalException { + private DataSelectionV3Base createSelection(Map dsInfos, Map dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) throws 
InternalException { StorageUnit storageUnit = this.propertyHandler.getStorageUnit(); @@ -142,11 +145,11 @@ private DataSelectionV3Base getSelection( String userSessionId, String investiga private DataSelectionV3Base prepareFromIds(boolean dfWanted, boolean dsWanted, List dfids, List dsids, List invids, String userSessionId, Session restSessionToUse, Session userRestSession, RequestType requestType) throws NotFoundException, InsufficientPrivilegesException, InternalException, BadRequestException { - var dsInfos = new HashMap(); + var dsInfos = new HashMap(); var emptyDatasets = new HashSet(); - var dfInfos = new HashSet(); - if (dfWanted) { - dfInfos = new HashSet<>(); + var dfInfos = new HashMap(); + if (dfWanted) { //redundant ? + dfInfos = new HashMap(); } try { @@ -162,7 +165,7 @@ private DataSelectionV3Base prepareFromIds(boolean dfWanted, boolean dsWanted, L if (dfWanted) { Datafile df = (Datafile) icat.get(userSessionId, "Datafile", dfid); String location = IdsBean.getLocation(dfid, df.getLocation()); - dfInfos.add( + dfInfos.put( df.getId(), new DataFileInfo(dfid, df.getName(), location, df.getCreateId(), df.getModId(), dsid)); } } else { @@ -255,7 +258,7 @@ public static List getValidIds(String thing, String idList) throws BadRequ return result; } - private void manyDfs(HashSet dfInfos, long dsid, Session restSessionToUse, JsonArray result) + private void manyDfs(Map dfInfos, long dsid, Session restSessionToUse, JsonArray result) throws IcatException, InsufficientPrivilegesException, InternalException { // dataset access for the user has been checked so the REST session for the // reader account can be used if the IDS setting to allow this is enabled @@ -272,7 +275,7 @@ private void manyDfs(HashSet dfInfos, long dsid, Session restSessi JsonArray tup = (JsonArray) tupV; long dfid = tup.getJsonNumber(0).longValueExact(); String location = IdsBean.getLocation(dfid, tup.getString(2, null)); - dfInfos.add( + dfInfos.put(dfid, new DataFileInfo(dfid, 
tup.getString(1), location, tup.getString(3), tup.getString(4), dsid)); } } else { @@ -291,7 +294,7 @@ private void manyDfs(HashSet dfInfos, long dsid, Session restSessi } } - private void manyDss(HashMap dsInfos, HashSet emptyDatasets, HashSet dfInfos, Long invid, boolean dfWanted, Session userRestSession, Session restSessionToUseForDfs, JsonArray result) + private void manyDss(Map dsInfos, HashSet emptyDatasets, Map dfInfos, Long invid, boolean dfWanted, Session userRestSession, Session restSessionToUseForDfs, JsonArray result) throws IcatException, InsufficientPrivilegesException, InternalException { long min = result.getJsonNumber(0).longValueExact(); long max = result.getJsonNumber(1).longValueExact(); @@ -363,19 +366,19 @@ private void createRequestTypeToReturnsMapping() throws InternalException { this.requestTypeToReturnsMapping.put(RequestType.GETDATA, Returns.DATASETS_AND_DATAFILES); if(storageUnit == null ) { - //this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATASETS); + this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATASETS); } else if (storageUnit == StorageUnit.DATAFILE) { - //this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATAFILES); + this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.RESTORE, Returns.DATAFILES); this.requestTypeToReturnsMapping.put(RequestType.ARCHIVE, Returns.DATAFILES); } else if(storageUnit == StorageUnit.DATASET) { - //this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATASETS); + this.requestTypeToReturnsMapping.put(RequestType.GETSTATUS, Returns.DATASETS); //this.requestTypeToReturnsMapping.put(RequestType.RESTORE, Returns.DATASETS); this.requestTypeToReturnsMapping.put(RequestType.ARCHIVE, Returns.DATASETS); } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java 
b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java index c29e94e0..b925ce7d 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java @@ -1,5 +1,7 @@ package org.icatproject.ids.v3; +import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; @@ -9,12 +11,11 @@ import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; -import org.icatproject.ids.v3.models.DataFileInfo; -import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.DataInfoBase; public class DataSelectionForSingleLevelStorage extends DataSelectionV3Base { - protected DataSelectionForSingleLevelStorage(Map dsInfos, Set dfInfos, + protected DataSelectionForSingleLevelStorage(Map dsInfos, Map dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); @@ -30,7 +31,20 @@ public void checkOnline() throws InternalException, DataNotOnlineException { @Override protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { - throw new NotImplementedException("This operation is unavailable for single level storage"); + throw new InternalException("This operation is unavailable for single level storage"); + } + + + @Override + protected Collection getDataInfosForStatusCheck() { + return new ArrayList(); + } + + + @Override + protected boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalException { + + throw new InternalException("This operation is unavailable for single level storage"); } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java 
b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java index 942d38d7..19cabdb0 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -1,5 +1,6 @@ package org.icatproject.ids.v3; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; @@ -9,12 +10,11 @@ import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; -import org.icatproject.ids.v3.models.DataFileInfo; -import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.DataInfoBase; public class DataSelectionForStorageUnitDatafile extends DataSelectionV3Base { - protected DataSelectionForStorageUnitDatafile(Map dsInfos, Set dfInfos, + protected DataSelectionForStorageUnitDatafile(Map dsInfos, Map dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); @@ -24,7 +24,7 @@ protected DataSelectionForStorageUnitDatafile(Map dsInfos, Se public void checkOnline()throws InternalException, DataNotOnlineException { boolean maybeOffline = false; - for (DataFileInfo dfInfo : dfInfos) { + for (DataInfoBase dfInfo : dfInfos.values()) { if (this.restoreIfOffline(dfInfo)) { maybeOffline = true; } @@ -35,12 +35,12 @@ public void checkOnline()throws InternalException, DataNotOnlineException { } } - public boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { + public boolean restoreIfOffline(DataInfoBase dfInfo) throws InternalException { boolean maybeOffline = false; var serviceProvider = ServiceProvider.getInstance(); if (serviceProvider.getFsm().getMaybeOffline().contains(dfInfo)) { maybeOffline = true; - } else if (!serviceProvider.getMainStorage().exists(dfInfo.getDfLocation())) { + 
} else if (!serviceProvider.getMainStorage().exists(dfInfo.getLocation())) { serviceProvider.getFsm().queue(dfInfo, DeferredOp.RESTORE); maybeOffline = true; } @@ -50,9 +50,19 @@ public boolean restoreIfOffline(DataFileInfo dfInfo) throws InternalException { @Override protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { - for (DataFileInfo dfInfo : dfInfos) { + for (DataInfoBase dfInfo : dfInfos.values()) { ServiceProvider.getInstance().getFsm().queue(dfInfo, operation); } } + + @Override + protected Collection getDataInfosForStatusCheck() { + return this.dfInfos.values(); + } + + @Override + protected boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalException { + return ServiceProvider.getInstance().getMainStorage().exists(dataInfo.getLocation()); + } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java index 00c5f6e3..f965fc0e 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -1,5 +1,6 @@ package org.icatproject.ids.v3; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; @@ -9,12 +10,12 @@ import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; -import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; public class DataSelectionForStorageUnitDataset extends DataSelectionV3Base { - protected DataSelectionForStorageUnitDataset(Map dsInfos, Set dfInfos, + protected DataSelectionForStorageUnitDataset(Map dsInfos, Map dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType 
requestType) { super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); @@ -24,7 +25,7 @@ protected DataSelectionForStorageUnitDataset(Map dsInfos, Set public void checkOnline() throws InternalException, DataNotOnlineException { boolean maybeOffline = false; - for (DataSetInfo dsInfo : dsInfos.values()) { + for (DataInfoBase dsInfo : dsInfos.values()) { if (this.restoreIfOffline(dsInfo, emptyDatasets)) { maybeOffline = true; } @@ -36,12 +37,12 @@ public void checkOnline() throws InternalException, DataNotOnlineException { } - public boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) throws InternalException { + public boolean restoreIfOffline(DataInfoBase dsInfo, Set emptyDatasets) throws InternalException { boolean maybeOffline = false; var serviceProvider = ServiceProvider.getInstance(); if (serviceProvider.getFsm().getMaybeOffline().contains(dsInfo)) { maybeOffline = true; - } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists(dsInfo)) { + } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists((DataSetInfo) dsInfo)) { //TODO: casting to DataSetInfo save? serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); maybeOffline = true; } @@ -51,10 +52,24 @@ public boolean restoreIfOffline(DataSetInfo dsInfo, Set emptyDatasets) thr @Override protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { - for (DataSetInfo dsInfo : dsInfos.values()) { + for (DataInfoBase dsInfo : dsInfos.values()) { ServiceProvider.getInstance().getFsm().queue(dsInfo, operation); } } + + @Override + protected Collection getDataInfosForStatusCheck() { + return this.dsInfos.values(); + } + + @Override + protected boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalException { + + var dsInfo = (DataSetInfo) dataInfo; + if(dsInfo == null) throw new InternalException("Could not cast DataInfoBase to DataSetInfo. 
Did you handed over another sub type?"); + + return emptyDatasets.contains(dataInfo.getId()) || ServiceProvider.getInstance().getMainStorage().exists(dsInfo); + } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java index a6ec4ed6..3923f213 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -1,25 +1,26 @@ package org.icatproject.ids.v3; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import org.icatproject.ids.Status; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; -import org.icatproject.ids.v3.models.DataFileInfo; -import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.DataInfoBase; public abstract class DataSelectionV3Base { - protected Map dsInfos; - protected Set dfInfos; + protected Map dsInfos; + protected Map dfInfos; protected Set emptyDatasets; protected List invids; protected List dsids; @@ -27,7 +28,7 @@ public abstract class DataSelectionV3Base { protected RequestType requestType; protected HashMap requestTypeToDeferredOpMapping; - protected DataSelectionV3Base(Map dsInfos, Set dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { + protected DataSelectionV3Base(Map dsInfos, Map dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { this.dsInfos = dsInfos; this.dfInfos = dfInfos; @@ -47,13 +48,21 @@ protected 
DataSelectionV3Base(Map dsInfos, Set protected abstract void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException; + /** + * Should it be better to have a get + * @return + */ + protected abstract Collection getDataInfosForStatusCheck(); + + protected abstract boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalException; - public Map getDsInfo() { + + public Map getDsInfo() { return dsInfos; } - public Set getDfInfo() { + public Map getDfInfo() { return dfInfos; } @@ -110,4 +119,26 @@ public void scheduleTask() throws NotImplementedException, InternalException { this.scheduleTask(operation); } + public Status getStatus() throws InternalException { + Status status = Status.ONLINE; + var serviceProvider = ServiceProvider.getInstance(); + + Set restoring = serviceProvider.getFsm().getRestoring(); + Set maybeOffline = serviceProvider.getFsm().getMaybeOffline(); + for (DataInfoBase dataInfo : this.getDataInfosForStatusCheck()) { + serviceProvider.getFsm().checkFailure(dataInfo.getId()); + if (restoring.contains(dataInfo)) { + status = Status.RESTORING; + } else if (maybeOffline.contains(dataInfo)) { + status = Status.ARCHIVED; + break; + } else if (!this.existsInMainStorage(dataInfo)) { + status = Status.ARCHIVED; + break; + } + } + + return status; + } + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/PreparedV3.java b/src/main/java/org/icatproject/ids/v3/PreparedV3.java new file mode 100644 index 00000000..a68e1f9b --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/PreparedV3.java @@ -0,0 +1,14 @@ +package org.icatproject.ids.v3; + +import java.util.Set; +import java.util.SortedMap; +import org.icatproject.ids.v3.models.DataInfoBase; + +/* This is a POJO with only package access so don't make data private */ +public class PreparedV3 { + public boolean zip; + public boolean compress; + public SortedMap dfInfos; + public SortedMap dsInfos; + public Set emptyDatasets; +} \ No newline at 
end of file diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index 563d0da8..74a852f7 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -9,13 +9,12 @@ import java.util.Map; import java.util.Set; import java.util.SortedMap; -import java.util.SortedSet; import java.util.TreeMap; -import java.util.TreeSet; import java.util.regex.Pattern; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; @@ -70,7 +69,7 @@ public RequestType getRequestType() { return this.requestType; } - public DataSelectionV3Base getDataSelection(Map dsInfos, Set dfInfos, Set emptyDatasets) throws InternalException { + public DataSelectionV3Base getDataSelection(Map dsInfos, Map dfInfos, Set emptyDatasets) throws InternalException { return DataSelectionFactory.get(dsInfos, dfInfos, emptyDatasets, this.getRequestType()); } @@ -110,22 +109,23 @@ protected static void validateUUID(String thing, String id) throws BadRequestExc throw new BadRequestException("The " + thing + " parameter '" + id + "' is not a valid UUID"); } - protected static Prepared unpack(InputStream stream) throws InternalException { - Prepared prepared = new Prepared(); + protected static PreparedV3 unpack(InputStream stream) throws InternalException { + PreparedV3 prepared = new PreparedV3(); JsonObject pd; try (JsonReader jsonReader = Json.createReader(stream)) { pd = jsonReader.readObject(); } prepared.zip = pd.getBoolean("zip"); prepared.compress = pd.getBoolean("compress"); - SortedMap dsInfos = new TreeMap<>(); - SortedSet dfInfos = new TreeSet<>(); + SortedMap dsInfos = new TreeMap<>(); + SortedMap dfInfos = new TreeMap<>(); 
Set emptyDatasets = new HashSet<>(); for (JsonValue itemV : pd.getJsonArray("dfInfo")) { JsonObject item = (JsonObject) itemV; String dfLocation = item.isNull("dfLocation") ? null : item.getString("dfLocation"); - dfInfos.add(new DataFileInfo(item.getJsonNumber("dfId").longValueExact(), item.getString("dfName"), + long dfid = item.getJsonNumber("dfId").longValueExact(); + dfInfos.put(dfid, new DataFileInfo(dfid, item.getString("dfName"), dfLocation, item.getString("createId"), item.getString("modId"), item.getJsonNumber("dsId").longValueExact())); diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 7cbae32f..58a2703f 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -23,6 +23,7 @@ import org.icatproject.ids.v3.handlers.GetIcatUrlHandler; import org.icatproject.ids.v3.handlers.GetServiceStatusHandler; import org.icatproject.ids.v3.handlers.GetSizeHandler; +import org.icatproject.ids.v3.handlers.GetStatusHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,6 +56,7 @@ public RequestHandlerService() { this.registerHandler(new GetDataFileIdsHandler()); this.registerHandler(new GetServiceStatusHandler()); this.registerHandler(new GetSizeHandler()); + this.registerHandler(new GetStatusHandler()); } diff --git a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java index 38fe5f29..1880942c 100644 --- a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java +++ b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java @@ -39,9 +39,9 @@ public static void createInstance(Transmitter transmitter, FiniteStateMachine fs instance = new ServiceProvider(transmitter, fsm, lockManager, reader); } - public static ServiceProvider getInstance() throws 
InternalException { + public static ServiceProvider getInstance() { if(instance == null) { - throw new InternalException("ServiceProvider is not yet instantiated, please call createInstance at first."); + throw new RuntimeException("ServiceProvider is not yet instantiated, please call createInstance at first."); } return instance; } diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index cb5fbbcd..91f52f11 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE + GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java index 5274e081..dd0a13ee 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataFileIdsHandler.java @@ -6,10 +6,10 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.util.HashMap; -import java.util.Set; +import java.util.Map; +import java.util.SortedMap; import org.icatproject.IcatException_Exception; -import org.icatproject.ids.Prepared; import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; @@ -18,11 +18,12 @@ import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.PreparedV3; import 
org.icatproject.ids.v3.RequestHandlerBase; import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.RequestType; -import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.ValueContainer; import jakarta.json.Json; @@ -65,7 +66,7 @@ private String getDatafileIds(String preparedId, String ip) validateUUID("preparedId", preparedId); // Do it - Prepared prepared; + PreparedV3 prepared; try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { prepared = unpack(stream); } catch (NoSuchFileException e) { @@ -76,14 +77,14 @@ private String getDatafileIds(String preparedId, String ip) final boolean zip = prepared.zip; final boolean compress = prepared.compress; - final Set dfInfos = prepared.dfInfos; + final SortedMap dfInfos = prepared.dfInfos; ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { gen.write("zip", zip); gen.write("compress", compress); gen.writeStartArray("ids"); - for (DataFileInfo dfInfo : dfInfos) { + for (DataInfoBase dfInfo : dfInfos.values()) { gen.write(dfInfo.getId()); } gen.writeEnd().writeEnd().close(); @@ -120,11 +121,11 @@ private String getDatafileIds(String sessionId, String investigationIds, String final DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); // Do it - Set dfInfos = dataSelection.getDfInfo(); + Map dfInfos = dataSelection.getDfInfo(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { gen.writeStartArray("ids"); - for (DataFileInfo dfInfo : dfInfos) { + for (DataInfoBase dfInfo : dfInfos.values()) { gen.write(dfInfo.getId()); } gen.writeEnd().writeEnd().close(); diff --git 
a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java index 5d89e9da..b77c2e6a 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetDataHandler.java @@ -7,16 +7,15 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.text.SimpleDateFormat; +import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Map; -import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.icatproject.IcatException_Exception; -import org.icatproject.ids.Prepared; import org.icatproject.ids.LockManager.Lock; import org.icatproject.ids.LockManager.LockType; import org.icatproject.ids.StorageUnit; @@ -29,13 +28,13 @@ import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.PreparedV3; import org.icatproject.ids.v3.RequestHandlerBase; import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.helper.SO; -import org.icatproject.ids.v3.models.DataFileInfo; -import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.ValueContainer; import jakarta.json.Json; @@ -112,7 +111,7 @@ private Response getData(String preparedId, String outname, final long offset, S validateUUID("preparedId", preparedId); // Do it - Prepared prepared; + PreparedV3 prepared; try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { prepared = unpack(stream); } catch (NoSuchFileException e) { @@ -121,11 +120,11 @@ private Response getData(String preparedId, 
String outname, final long offset, S throw new InternalException(e.getClass() + " " + e.getMessage()); } - DataSelectionV3Base dataSelection = this.getDataSelection((Map) prepared.dsInfos, (Set) prepared.dfInfos, (Set) prepared.emptyDatasets); + DataSelectionV3Base dataSelection = this.getDataSelection(prepared.dsInfos, prepared.dfInfos, prepared.emptyDatasets); final boolean zip = prepared.zip; final boolean compress = prepared.compress; - final Set dfInfos = prepared.dfInfos; - final Map dsInfos = prepared.dsInfos; + final Map dfInfos = prepared.dfInfos; + final Map dsInfos = prepared.dsInfos; Lock lock = null; try { @@ -135,7 +134,7 @@ private Response getData(String preparedId, String outname, final long offset, S if (twoLevel) { dataSelection.checkOnline(); } - checkDatafilesPresent(dfInfos); + checkDatafilesPresent(dfInfos.values()); /* Construct the name to include in the headers */ String name; @@ -143,7 +142,7 @@ private Response getData(String preparedId, String outname, final long offset, S if (zip) { name = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date()) + ".zip"; } else { - name = dfInfos.iterator().next().getDfName(); + name = dfInfos.values().iterator().next().getName(); } } else { if (zip) { @@ -208,8 +207,8 @@ private Response getData(String sessionId, String investigationIds, String datas final DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); // Do it - Map dsInfos = dataSelection.getDsInfo(); - Set dfInfos = dataSelection.getDfInfo(); + Map dsInfos = dataSelection.getDsInfo(); + Map dfInfos = dataSelection.getDfInfo(); Lock lock = null; try { @@ -218,7 +217,7 @@ private Response getData(String sessionId, String investigationIds, String datas if (twoLevel) { dataSelection.checkOnline(); } - checkDatafilesPresent(dfInfos); + checkDatafilesPresent(dfInfos.values()); final boolean finalZip = zip ? 
true : dataSelection.mustZip(); @@ -228,7 +227,7 @@ private Response getData(String sessionId, String investigationIds, String datas if (finalZip) { name = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date()) + ".zip"; } else { - name = dataSelection.getDfInfo().iterator().next().getDfName(); + name = dataSelection.getDfInfo().values().iterator().next().getName(); } } else { if (finalZip) { @@ -280,14 +279,14 @@ private Response getData(String sessionId, String investigationIds, String datas } } - private void checkDatafilesPresent(Set dfInfos) + private void checkDatafilesPresent(Collection dfInfos) throws NotFoundException, InternalException { var serviceProvider = ServiceProvider.getInstance(); /* Check that datafiles have not been deleted before locking */ int n = 0; StringBuffer sb = new StringBuffer("SELECT COUNT(df) from Datafile df WHERE (df.id in ("); - for (DataFileInfo dfInfo : dfInfos) { + for (DataInfoBase dfInfo : dfInfos) { if (n != 0) { sb.append(','); } diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java index 9150d9ae..93ab2d9f 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java @@ -7,7 +7,7 @@ import java.nio.file.NoSuchFileException; import java.util.HashMap; import java.util.List; -import java.util.Set; +import java.util.Map; import org.icatproject.Datafile; import org.icatproject.IcatException_Exception; @@ -20,11 +20,12 @@ import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.PreparedV3; import org.icatproject.ids.v3.RequestHandlerBase; import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.RequestType; -import 
org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.ValueContainer; import jakarta.json.Json; @@ -68,7 +69,7 @@ public long getSize(String preparedId, String ip) validateUUID("preparedId", preparedId); // Do it - Prepared prepared; + PreparedV3 prepared; try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { prepared = unpack(stream); } catch (NoSuchFileException e) { @@ -77,7 +78,7 @@ public long getSize(String preparedId, String ip) throw new InternalException(e.getClass() + " " + e.getMessage()); } - final Set dfInfos = prepared.dfInfos; + final Map dfInfos = prepared.dfInfos; // Note that the "fast computation for the simple case" (see the other getSize() implementation) is not // available when calling getSize() with a preparedId. @@ -92,7 +93,7 @@ public long getSize(String preparedId, String ip) StringBuilder sb = new StringBuilder(); int n = 0; - for (DataFileInfo df : dfInfos) { + for (DataInfoBase df : dfInfos.values()) { if (sb.length() != 0) { sb.append(','); } @@ -166,7 +167,7 @@ public long getSize(String sessionId, String investigationIds, String datasetIds StringBuilder sb = new StringBuilder(); int n = 0; - for (DataFileInfo df : dataSelection.getDfInfo()) { + for (DataInfoBase df : dataSelection.getDfInfo().values()) { if (sb.length() != 0) { sb.append(','); } diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java new file mode 100644 index 00000000..81ce1763 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java @@ -0,0 +1,154 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + 
+import org.icatproject.IcatException_Exception; +import org.icatproject.ids.Prepared; +import org.icatproject.ids.Status; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.PreparedV3; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +public class GetStatusHandler extends RequestHandlerBase { + + public GetStatusHandler() { + super(new StorageUnit[] {StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.GETSTATUS ); + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, NotImplementedException { + + String preparedId = parameters.get("preparedId").getString(); + String sessionId = parameters.get("sessionId").getString(); + String investigationIds = parameters.get("investigationIds").getString(); + String datasetIds = parameters.get("datasetIds").getString(); + String datafileIds = parameters.get("datafileIds").getString(); + String ip = parameters.get("ip").getString(); + + + if (preparedId != null) { + return new ValueContainer(this.getStatus(preparedId, ip)); + } else { + return new ValueContainer(this.getStatus(sessionId, investigationIds, datasetIds, 
datafileIds, ip)); + } + } + + + private String getStatus(String preparedId, String ip) + throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { + + long start = System.currentTimeMillis(); + var serviceProvider = ServiceProvider.getInstance(); + + // Log and validate + logger.info("New webservice request: getSize preparedId = '{}'", preparedId); + validateUUID("preparedId", preparedId); + + // Do it + PreparedV3 prepared; + try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { + prepared = unpack(stream); + } catch (NoSuchFileException e) { + throw new NotFoundException("The preparedId " + preparedId + " is not known"); + } catch (IOException e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + final Map dfInfos = prepared.dfInfos; + final Map dsInfos = prepared.dsInfos; + Set emptyDatasets = prepared.emptyDatasets; + + // Do it + var dataSelection = this.getDataSelection(dsInfos, dfInfos, emptyDatasets); + Status status = dataSelection.getStatus(); + + logger.debug("Status is " + status.name()); + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("preparedId", preparedId); + gen.writeEnd(); + } + String body = baos.toString(); + serviceProvider.getTransmitter().processMessage("getStatus", ip, body, start); + } + + return status.name(); + + } + + + private String getStatus(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) + throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException, NotImplementedException { + + long start = System.currentTimeMillis(); + var serviceProvider = ServiceProvider.getInstance(); + + // Log and validate + logger.info( + String.format("New webservice request: getStatus investigationIds=%s, 
datasetIds=%s, datafileIds=%s", + investigationIds, datasetIds, datafileIds)); + + if (sessionId == null) { + try { + sessionId = serviceProvider.getIcatReader().getSessionId(); + } catch (IcatException_Exception e) { + throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); + } + } else { + validateUUID("sessionId", sessionId); + } + + // Do it + DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, investigationIds, datasetIds, datafileIds); + Status status = dataSelection.getStatus(); + + + logger.debug("Status is " + status.name()); + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + if (sessionId != null) { + gen.write("userName", serviceProvider.getIcat().getUserName(sessionId)); + } + addIds(gen, investigationIds, datasetIds, datafileIds); + gen.writeEnd(); + } + String body = baos.toString(); + serviceProvider.getTransmitter().processMessage("getStatus", ip, body, start); + } catch (IcatException_Exception e) { + logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); + } + } + + return status.name(); + + } +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/helper/SO.java b/src/main/java/org/icatproject/ids/v3/helper/SO.java index 2badb253..fb01b92a 100644 --- a/src/main/java/org/icatproject/ids/v3/helper/SO.java +++ b/src/main/java/org/icatproject/ids/v3/helper/SO.java @@ -5,17 +5,17 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.util.List; import java.util.Map; -import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipException; import java.util.zip.ZipOutputStream; import org.icatproject.ids.RangeOutputStream; import org.icatproject.ids.LockManager.Lock; -import org.icatproject.ids.plugin.DsInfo; import 
org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,10 +28,10 @@ public class SO implements StreamingOutput { private long offset; private boolean zip; - private Map dsInfos; + private Map dsInfos; private Lock lock; private boolean compress; - private Set dfInfos; + private Map dfInfos; private String ip; private long start; private Long transferId; @@ -40,7 +40,7 @@ public class SO implements StreamingOutput { private static final int BUFSIZ = 2048; private final static Logger logger = LoggerFactory.getLogger(SO.class); - public SO(Map dsInfos, Set dfInfos, long offset, boolean zip, boolean compress, + public SO(Map dsInfos, Map dfInfos, long offset, boolean zip, boolean compress, Lock lock, Long transferId, String ip, long start, ServiceProvider serviceProvider) { this.offset = offset; this.zip = zip; @@ -69,15 +69,16 @@ public void write(OutputStream output) throws IOException { zos.setLevel(0); // Otherwise use default compression } - for (DataFileInfo dfInfo : dfInfos) { + for ( DataInfoBase dataInfo : dfInfos.values()) { + var dfInfo = (DataFileInfo) dataInfo; logger.debug("Adding " + dfInfo + " to zip"); transfer = dfInfo; - DsInfo dsInfo = dsInfos.get(dfInfo.getDsId()); - String entryName = this.serviceProvider.getPropertyHandler().getZipMapper().getFullEntryName(dsInfo, dfInfo); + DataInfoBase dsInfo = dsInfos.get(dfInfo.getDsId() ); + String entryName = this.serviceProvider.getPropertyHandler().getZipMapper().getFullEntryName((DataSetInfo)dsInfo, (DataFileInfo)dfInfo); InputStream stream = null; try { zos.putNextEntry(new ZipEntry(entryName)); - stream = this.serviceProvider.getMainStorage().get(dfInfo.getDfLocation(), dfInfo.getCreateId(), dfInfo.getModId()); + stream = this.serviceProvider.getMainStorage().get(dfInfo.getLocation(), 
dfInfo.getCreateId(), dfInfo.getModId()); int length; while ((length = stream.read(bytes)) >= 0) { zos.write(bytes, 0, length); @@ -92,7 +93,7 @@ public void write(OutputStream output) throws IOException { } zos.close(); } else { - DataFileInfo dfInfo = dfInfos.iterator().next(); + DataFileInfo dfInfo = (DataFileInfo) dfInfos.values().iterator().next(); transfer = dfInfo; InputStream stream = this.serviceProvider.getMainStorage().get(dfInfo.getDfLocation(), dfInfo.getCreateId(), dfInfo.getModId()); From bc105492bf327321b77b9a61d27c27f4a2a4c31b Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 27 Feb 2024 13:16:55 +0100 Subject: [PATCH 20/92] Removed no longer needed methods from IdsBean --- .../java/org/icatproject/ids/IdsBean.java | 545 ------------------ .../ids/v3/RequestHandlerBase.java | 1 - .../icatproject/ids/v3/ServiceProvider.java | 1 - .../ids/v3/handlers/GetSizeHandler.java | 1 - .../ids/v3/handlers/GetStatusHandler.java | 1 - 5 files changed, 549 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 4f9fc245..59975cc5 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -70,7 +70,6 @@ import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.plugin.MainStorageInterface; -import org.icatproject.ids.plugin.ZipMapperInterface; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.DeferredOp; @@ -375,8 +374,6 @@ public static void validateUUID(String thing, String id) throws BadRequestExcept private boolean readOnly; - private Set rootUserNames; - private StorageUnit storageUnit; private boolean twoLevel; @@ -417,53 +414,6 @@ private void addIds(JsonGenerator gen, String investigationIds, String datasetId } } - public void 
archive(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) - throws NotImplementedException, BadRequestException, InsufficientPrivilegesException, InternalException, - NotFoundException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info("New webservice request: archive " + "investigationIds='" + investigationIds + "' " + "datasetIds='" - + datasetIds + "' " + "datafileIds='" + datafileIds + "'"); - - validateUUID("sessionId", sessionId); - - // Do it - if (storageUnit == StorageUnit.DATASET) { - DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATASETS); - Map dsInfos = dataSelection.getDsInfo(); - for (DataSetInfo dsInfo : dsInfos.values()) { - fsm.queue(dsInfo, DeferredOp.ARCHIVE); - } - } else if (storageUnit == StorageUnit.DATAFILE) { - DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATAFILES); - Set dfInfos = dataSelection.getDfInfo(); - for (DataFileInfo dfInfo : dfInfos) { - fsm.queue(dfInfo, DeferredOp.ARCHIVE); - } - } else { - throw new NotImplementedException("This operation is unavailable for single level storage"); - } - - if (logSet.contains(CallType.MIGRATE)) { - try { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("userName", icat.getUserName(sessionId)); - addIds(gen, investigationIds, datasetIds, datafileIds); - gen.writeEnd(); - } - String body = baos.toString(); - transmitter.processMessage("archive", ip, body, start); - } catch (IcatException_Exception e) { - logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); - } - } - } - public void delete(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) throws 
NotImplementedException, BadRequestException, InsufficientPrivilegesException, InternalException, NotFoundException, DataNotOnlineException { @@ -573,499 +523,6 @@ public void delete(String sessionId, String investigationIds, String datasetIds, } } - public String getDatafileIds(String preparedId, String ip) - throws BadRequestException, InternalException, NotFoundException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info("New webservice request: getDatafileIds preparedId = '" + preparedId); - - validateUUID("preparedId", preparedId); - - // Do it - Prepared prepared; - try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { - prepared = unpack(stream); - } catch (NoSuchFileException e) { - throw new NotFoundException("The preparedId " + preparedId + " is not known"); - } catch (IOException e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } - - final boolean zip = prepared.zip; - final boolean compress = prepared.compress; - final Set dfInfos = prepared.dfInfos; - - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("zip", zip); - gen.write("compress", compress); - gen.writeStartArray("ids"); - for (DataFileInfo dfInfo : dfInfos) { - gen.write(dfInfo.getId()); - } - gen.writeEnd().writeEnd().close(); - } - String resp = baos.toString(); - - if (logSet.contains(CallType.INFO)) { - baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("preparedId", preparedId); - gen.writeEnd(); - } - transmitter.processMessage("getDatafileIds", ip, baos.toString(), start); - } - - return resp; - } - - public String getDatafileIds(String sessionId, String investigationIds, String datasetIds, String datafileIds, - String ip) - throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { - - long 
start = System.currentTimeMillis(); - - // Log and validate - logger.info(String.format( - "New webservice request: getDatafileIds investigationIds=%s, datasetIds=%s, datafileIds=%s", - investigationIds, datasetIds, datafileIds)); - - validateUUID("sessionId", sessionId); - - final DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATAFILES); - - // Do it - Set dfInfos = dataSelection.getDfInfo(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.writeStartArray("ids"); - for (DataFileInfo dfInfo : dfInfos) { - gen.write(dfInfo.getId()); - } - gen.writeEnd().writeEnd().close(); - } - String resp = baos.toString(); - - if (logSet.contains(CallType.INFO)) { - baos = new ByteArrayOutputStream(); - try { - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("userName", icat.getUserName(sessionId)); - addIds(gen, investigationIds, datasetIds, datafileIds); - gen.writeEnd(); - } - transmitter.processMessage("getDatafileIds", ip, baos.toString(), start); - } catch (IcatException_Exception e) { - logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); - } - } - - return resp; - - } - - public String getIcatUrl(String ip) { - if (logSet.contains(CallType.INFO)) { - transmitter.processMessage("getIcatUrl", ip, "{}", System.currentTimeMillis()); - } - return propertyHandler.getIcatUrl(); - } - - public String getServiceStatus(String sessionId, String ip) - throws InternalException, InsufficientPrivilegesException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info("New webservice request: getServiceStatus"); - - try { - String uname = icat.getUserName(sessionId); - if (!rootUserNames.contains(uname)) { - throw new InsufficientPrivilegesException(uname + " is not included in the ids rootUserNames set."); - 
} - } catch (IcatException_Exception e) { - IcatExceptionType type = e.getFaultInfo().getType(); - if (type == IcatExceptionType.SESSION) { - throw new InsufficientPrivilegesException(e.getClass() + " " + e.getMessage()); - } - throw new InternalException(e.getClass() + " " + e.getMessage()); - } - - if (logSet.contains(CallType.INFO)) { - try { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("userName", icat.getUserName(sessionId)); - gen.writeEnd(); - } - String body = baos.toString(); - transmitter.processMessage("getServiceStatus", ip, body, start); - } catch (IcatException_Exception e) { - logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); - } - } - - return fsm.getServiceStatus(); - } - - public long getSize(String preparedId, String ip) - throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info("New webservice request: getSize preparedId = '{}'", preparedId); - validateUUID("preparedId", preparedId); - - // Do it - Prepared prepared; - try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { - prepared = unpack(stream); - } catch (NoSuchFileException e) { - throw new NotFoundException("The preparedId " + preparedId + " is not known"); - } catch (IOException e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } - - final Set dfInfos = prepared.dfInfos; - - // Note that the "fast computation for the simple case" (see the other getSize() implementation) is not - // available when calling getSize() with a preparedId. 
- logger.debug("Slow computation for normal case"); - String sessionId; - try { - sessionId = reader.getSessionId(); - } catch (IcatException_Exception e) { - throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); - } - long size = 0; - - StringBuilder sb = new StringBuilder(); - int n = 0; - for (DataFileInfo df : dfInfos) { - if (sb.length() != 0) { - sb.append(','); - } - sb.append(df.getId()); - if (n++ == 500) { - size += getSizeFor(sessionId, sb); - sb = new StringBuilder(); - n = 0; - } - } - if (n > 0) { - size += getSizeFor(sessionId, sb); - } - - if (logSet.contains(CallType.INFO)) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("preparedId", preparedId); - gen.writeEnd(); - } - String body = baos.toString(); - transmitter.processMessage("getSize", ip, body, start); - } - - return size; - } - - - public long getSize(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) - throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info(String.format("New webservice request: getSize investigationIds=%s, datasetIds=%s, datafileIds=%s", - investigationIds, datasetIds, datafileIds)); - - validateUUID("sessionId", sessionId); - - List dfids = DataSelection.getValidIds("datafileIds", datafileIds); - List dsids = DataSelection.getValidIds("datasetIds", datasetIds); - List invids = DataSelection.getValidIds("investigationIds", investigationIds); - - long size = 0; - if (dfids.size() + dsids.size() + invids.size() == 1) { - size = getSizeFor(sessionId, invids, "df.dataset.investigation.id") - + getSizeFor(sessionId, dsids, "df.dataset.id") + getSizeFor(sessionId, dfids, "df.id"); - logger.debug("Fast computation for simple case"); - if (size == 0) { - try { - if (dfids.size() != 0) { - 
Datafile datafile = (Datafile) icat.get(sessionId, "Datafile", dfids.get(0)); - if (datafile.getLocation() == null) { - throw new NotFoundException("Datafile not found"); - } - } - if (dsids.size() != 0) { - icat.get(sessionId, "Dataset", dsids.get(0)); - } - if (invids.size() != 0) { - icat.get(sessionId, "Investigation", invids.get(0)); - } - } catch (IcatException_Exception e) { - throw new NotFoundException(e.getMessage()); - } - } - } else { - logger.debug("Slow computation for normal case"); - final DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATASETS_AND_DATAFILES); - - StringBuilder sb = new StringBuilder(); - int n = 0; - for (DataFileInfo df : dataSelection.getDfInfo()) { - if (sb.length() != 0) { - sb.append(','); - } - sb.append(df.getId()); - if (n++ == 500) { - size += getSizeFor(sessionId, sb); - sb = new StringBuilder(); - n = 0; - } - } - if (n > 0) { - size += getSizeFor(sessionId, sb); - } - } - - if (logSet.contains(CallType.INFO)) { - try { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("userName", icat.getUserName(sessionId)); - addIds(gen, investigationIds, datasetIds, datafileIds); - gen.writeEnd(); - } - String body = baos.toString(); - transmitter.processMessage("getSize", ip, body, start); - } catch (IcatException_Exception e) { - logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); - } - } - - return size; - } - - private long getSizeFor(String sessionId, List ids, String where) throws InternalException { - - long size = 0; - if (ids != null) { - - StringBuilder sb = new StringBuilder(); - int n = 0; - for (Long id : ids) { - if (sb.length() != 0) { - sb.append(','); - } - sb.append(id); - if (n++ == 500) { - size += evalSizeFor(sessionId, where, sb); - sb = new StringBuilder(); - n = 0; - } - } - if (n > 0) { - 
size += evalSizeFor(sessionId, where, sb); - } - } - return size; - } - - private long getSizeFor(String sessionId, StringBuilder sb) throws InternalException { - String query = "SELECT SUM(df.fileSize) from Datafile df WHERE df.id IN (" + sb.toString() + ") AND df.location IS NOT NULL"; - try { - return (Long) icat.search(sessionId, query).get(0); - } catch (IcatException_Exception e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } catch (IndexOutOfBoundsException e) { - return 0L; - } - } - - private long evalSizeFor(String sessionId, String where, StringBuilder sb) throws InternalException { - String query = "SELECT SUM(df.fileSize) from Datafile df WHERE " + where + " IN (" + sb.toString() + ") AND df.location IS NOT NULL"; - logger.debug("icat query for size: {}", query); - try { - return (Long) icat.search(sessionId, query).get(0); - } catch (IcatException_Exception e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } catch (IndexOutOfBoundsException e) { - return 0L; - } - } - - public String getStatus(String preparedId, String ip) - throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info("New webservice request: getSize preparedId = '{}'", preparedId); - validateUUID("preparedId", preparedId); - - // Do it - Prepared prepared; - try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { - prepared = unpack(stream); - } catch (NoSuchFileException e) { - throw new NotFoundException("The preparedId " + preparedId + " is not known"); - } catch (IOException e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } - - final Set dfInfos = prepared.dfInfos; - final Map dsInfos = prepared.dsInfos; - Set emptyDatasets = prepared.emptyDatasets; - - Status status = Status.ONLINE; - - if (storageUnit == StorageUnit.DATASET) { - Set restoring = 
fsm.getRestoring(); - Set maybeOffline = fsm.getMaybeOffline(); - for (DataSetInfo dsInfo : dsInfos.values()) { - fsm.checkFailure(dsInfo.getId()); - if (restoring.contains(dsInfo)) { - status = Status.RESTORING; - } else if (maybeOffline.contains(dsInfo)) { - status = Status.ARCHIVED; - break; - } else if (!emptyDatasets.contains(dsInfo.getId()) && !mainStorage.exists(dsInfo)) { - status = Status.ARCHIVED; - break; - } - } - } else if (storageUnit == StorageUnit.DATAFILE) { - Set restoring = fsm.getRestoring(); - Set maybeOffline = fsm.getMaybeOffline(); - for (DataFileInfo dfInfo : dfInfos) { - fsm.checkFailure(dfInfo.getId()); - if (restoring.contains(dfInfo)) { - status = Status.RESTORING; - } else if (maybeOffline.contains(dfInfo)) { - status = Status.ARCHIVED; - break; - } else if (!mainStorage.exists(dfInfo.getDfLocation())) { - status = Status.ARCHIVED; - break; - } - } - } - - logger.debug("Status is " + status.name()); - - if (logSet.contains(CallType.INFO)) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("preparedId", preparedId); - gen.writeEnd(); - } - String body = baos.toString(); - transmitter.processMessage("getStatus", ip, body, start); - } - - return status.name(); - - } - - public String getStatus(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) - throws BadRequestException, NotFoundException, InsufficientPrivilegesException, InternalException { - - long start = System.currentTimeMillis(); - - // Log and validate - logger.info( - String.format("New webservice request: getStatus investigationIds=%s, datasetIds=%s, datafileIds=%s", - investigationIds, datasetIds, datafileIds)); - - if (sessionId == null) { - try { - sessionId = reader.getSessionId(); - } catch (IcatException_Exception e) { - throw new InternalException(e.getFaultInfo().getType() + " " + e.getMessage()); - } - } else { - 
validateUUID("sessionId", sessionId); - } - - // Do it - Status status = Status.ONLINE; - - if (storageUnit == StorageUnit.DATASET) { - DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATASETS); - Map dsInfos = dataSelection.getDsInfo(); - - Set restoring = fsm.getRestoring(); - Set maybeOffline = fsm.getMaybeOffline(); - Set emptyDatasets = dataSelection.getEmptyDatasets(); - for (DataSetInfo dsInfo : dsInfos.values()) { - fsm.checkFailure(dsInfo.getId()); - if (restoring.contains(dsInfo)) { - status = Status.RESTORING; - } else if (maybeOffline.contains(dsInfo)) { - status = Status.ARCHIVED; - break; - } else if (!emptyDatasets.contains(dsInfo.getId()) && !mainStorage.exists(dsInfo)) { - status = Status.ARCHIVED; - break; - } - } - } else if (storageUnit == StorageUnit.DATAFILE) { - DataSelection dataSelection = new DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATAFILES); - Set dfInfos = dataSelection.getDfInfo(); - - Set restoring = fsm.getRestoring(); - Set maybeOffline = fsm.getMaybeOffline(); - for (DataFileInfo dfInfo : dfInfos) { - fsm.checkFailure(dfInfo.getId()); - if (restoring.contains(dfInfo)) { - status = Status.RESTORING; - } else if (maybeOffline.contains(dfInfo)) { - status = Status.ARCHIVED; - break; - } else if (!mainStorage.exists(dfInfo.getDfLocation())) { - status = Status.ARCHIVED; - break; - } - } - } else { - // Throw exception if selection does not exist - new DataSelection(propertyHandler, reader, sessionId, - investigationIds, datasetIds, datafileIds, Returns.DATASETS); - } - - logger.debug("Status is " + status.name()); - - if (logSet.contains(CallType.INFO)) { - try { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - if (sessionId != null) { - gen.write("userName", icat.getUserName(sessionId)); - } - 
addIds(gen, investigationIds, datasetIds, datafileIds); - gen.writeEnd(); - } - String body = baos.toString(); - transmitter.processMessage("getStatus", ip, body, start); - } catch (IcatException_Exception e) { - logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); - } - } - - return status.name(); - - } - @PostConstruct private void init() { try { @@ -1081,7 +538,6 @@ private void init() { preparedDir = propertyHandler.getCacheDir().resolve("prepared"); Files.createDirectories(preparedDir); - rootUserNames = propertyHandler.getRootUserNames(); readOnly = propertyHandler.getReadOnly(); enableWrite = propertyHandler.getEnableWrite(); @@ -1754,7 +1210,6 @@ public void restore(String sessionId, String investigationIds, String datasetIds } } - @SuppressWarnings("unchecked") public void write(String sessionId, String investigationIds, String datasetIds, String datafileIds, String ip) throws NotImplementedException, BadRequestException, InsufficientPrivilegesException, InternalException, NotFoundException, DataNotOnlineException { diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index 74a852f7..fe5c1ba0 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -28,7 +28,6 @@ import jakarta.json.stream.JsonGenerator; import org.icatproject.ids.DataSelection; -import org.icatproject.ids.Prepared; import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; diff --git a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java index 1880942c..ab39a989 100644 --- a/src/main/java/org/icatproject/ids/v3/ServiceProvider.java +++ b/src/main/java/org/icatproject/ids/v3/ServiceProvider.java @@ -7,7 +7,6 @@ import 
org.icatproject.ids.LockManager; import org.icatproject.ids.PropertyHandler; import org.icatproject.ids.Transmitter; -import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java index 93ab2d9f..8cfa2033 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetSizeHandler.java @@ -11,7 +11,6 @@ import org.icatproject.Datafile; import org.icatproject.IcatException_Exception; -import org.icatproject.ids.Prepared; import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java index 81ce1763..f1aa2f99 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java @@ -10,7 +10,6 @@ import java.util.Set; import org.icatproject.IcatException_Exception; -import org.icatproject.ids.Prepared; import org.icatproject.ids.Status; import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.BadRequestException; From 484fffc7f8e017a0f296b3e8cc116c5b60d4e452 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Tue, 27 Feb 2024 14:57:50 +0100 Subject: [PATCH 21/92] IsPreparedHandler #1 --- .../java/org/icatproject/ids/IdsBean.java | 1 + .../java/org/icatproject/ids/IdsService.java | 14 +- .../DataSelectionForSingleLevelStorage.java | 8 +- .../DataSelectionForStorageUnitDatafile.java | 28 +-- .../DataSelectionForStorageUnitDataset.java | 29 +-- 
.../ids/v3/DataSelectionV3Base.java | 35 ++- .../ids/v3/RequestHandlerBase.java | 6 + .../ids/v3/RequestHandlerService.java | 2 + .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../ids/v3/handlers/IsPreparedHandler.java | 200 ++++++++++++++++++ 10 files changed, 255 insertions(+), 70 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 59975cc5..55bd7406 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -538,6 +538,7 @@ private void init() { preparedDir = propertyHandler.getCacheDir().resolve("prepared"); Files.createDirectories(preparedDir); + readOnly = propertyHandler.getReadOnly(); enableWrite = propertyHandler.getEnableWrite(); diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index a7c92722..e90c6c2a 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -422,6 +422,9 @@ private void init() { * @throws BadRequestException * @throws NotFoundException * @throws InternalException + * @throws NotImplementedException + * @throws DataNotOnlineException + * @throws InsufficientPrivilegesException * @summary isPrepared * @statuscode 200 To indicate success */ @@ -429,8 +432,15 @@ private void init() { @Path("isPrepared") @Produces(MediaType.TEXT_PLAIN) public boolean isPrepared(@Context HttpServletRequest request, @QueryParam("preparedId") String preparedId) - throws BadRequestException, NotFoundException, InternalException { - return idsBean.isPrepared(preparedId, request.getRemoteAddr()); + throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, DataNotOnlineException, NotImplementedException { + + var parameters = new HashMap(); + parameters.put("preparedId", 
new ValueContainer(preparedId)); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.ISPREPARED, parameters).getBool(); + + //return idsBean.isPrepared(preparedId, request.getRemoteAddr()); } /** diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java index b925ce7d..9a98276d 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java @@ -22,12 +22,6 @@ protected DataSelectionForSingleLevelStorage(Map dsInfos, Ma } - @Override - public void checkOnline() throws InternalException, DataNotOnlineException { - // nothing to do here for single level storage - } - - @Override protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { @@ -36,7 +30,7 @@ protected void scheduleTask(DeferredOp operation) throws NotImplementedException @Override - protected Collection getDataInfosForStatusCheck() { + public Collection getPrimaryDataInfos() { return new ArrayList(); } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java index 19cabdb0..8a2dd1e1 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -20,32 +20,6 @@ protected DataSelectionForStorageUnitDatafile(Map dsInfos, M super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); } - @Override - public void checkOnline()throws InternalException, DataNotOnlineException { - - boolean maybeOffline = false; - for (DataInfoBase dfInfo : dfInfos.values()) { - if (this.restoreIfOffline(dfInfo)) { - maybeOffline = true; - } - } - if (maybeOffline) { 
- throw new DataNotOnlineException( - "Before getting a datafile, it must be restored, restoration requested automatically"); - } - } - - public boolean restoreIfOffline(DataInfoBase dfInfo) throws InternalException { - boolean maybeOffline = false; - var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getMaybeOffline().contains(dfInfo)) { - maybeOffline = true; - } else if (!serviceProvider.getMainStorage().exists(dfInfo.getLocation())) { - serviceProvider.getFsm().queue(dfInfo, DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } @Override protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { @@ -56,7 +30,7 @@ protected void scheduleTask(DeferredOp operation) throws NotImplementedException } @Override - protected Collection getDataInfosForStatusCheck() { + public Collection getPrimaryDataInfos() { return this.dfInfos.values(); } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java index f965fc0e..8a31195e 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -21,33 +21,6 @@ protected DataSelectionForStorageUnitDataset(Map dsInfos, Ma super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); } - @Override - public void checkOnline() throws InternalException, DataNotOnlineException { - - boolean maybeOffline = false; - for (DataInfoBase dsInfo : dsInfos.values()) { - if (this.restoreIfOffline(dsInfo, emptyDatasets)) { - maybeOffline = true; - } - } - if (maybeOffline) { - throw new DataNotOnlineException( - "Before putting, getting or deleting a datafile, its dataset has to be restored, restoration requested automatically"); - } - } - - - public boolean restoreIfOffline(DataInfoBase dsInfo, Set emptyDatasets) 
throws InternalException { - boolean maybeOffline = false; - var serviceProvider = ServiceProvider.getInstance(); - if (serviceProvider.getFsm().getMaybeOffline().contains(dsInfo)) { - maybeOffline = true; - } else if (!emptyDatasets.contains(dsInfo.getId()) && !serviceProvider.getMainStorage().exists((DataSetInfo) dsInfo)) { //TODO: casting to DataSetInfo save? - serviceProvider.getFsm().queue(dsInfo, DeferredOp.RESTORE); - maybeOffline = true; - } - return maybeOffline; - } @Override protected void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException { @@ -58,7 +31,7 @@ protected void scheduleTask(DeferredOp operation) throws NotImplementedException } @Override - protected Collection getDataInfosForStatusCheck() { + public Collection getPrimaryDataInfos() { return this.dsInfos.values(); } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java index 3923f213..cf076bd7 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -44,15 +44,13 @@ protected DataSelectionV3Base(Map dsInfos, Map getDataInfosForStatusCheck(); + public abstract Collection getPrimaryDataInfos(); protected abstract boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalException; @@ -125,7 +123,7 @@ public Status getStatus() throws InternalException { Set restoring = serviceProvider.getFsm().getRestoring(); Set maybeOffline = serviceProvider.getFsm().getMaybeOffline(); - for (DataInfoBase dataInfo : this.getDataInfosForStatusCheck()) { + for (DataInfoBase dataInfo : this.getPrimaryDataInfos()) { serviceProvider.getFsm().checkFailure(dataInfo.getId()); if (restoring.contains(dataInfo)) { status = Status.RESTORING; @@ -141,4 +139,31 @@ public Status getStatus() throws InternalException { return status; } + public boolean restoreIfOffline(DataInfoBase dataInfo) throws 
InternalException { + boolean maybeOffline = false; + var serviceProvider = ServiceProvider.getInstance(); + if (serviceProvider.getFsm().getMaybeOffline().contains(dataInfo)) { + maybeOffline = true; + } else if (!this.existsInMainStorage(dataInfo)) { + serviceProvider.getFsm().queue(dataInfo, DeferredOp.RESTORE); + maybeOffline = true; + } + return maybeOffline; + } + + + public void checkOnline()throws InternalException, DataNotOnlineException { + + boolean maybeOffline = false; + for (DataInfoBase dfInfo : this.getPrimaryDataInfos()) { + if (this.restoreIfOffline(dfInfo)) { + maybeOffline = true; + } + } + if (maybeOffline) { + throw new DataNotOnlineException( + "Before getting, putting, etc. a datafile or dataset, it must be restored, restoration requested automatically"); + } + } + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index fe5c1ba0..da79af04 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -10,6 +10,8 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import java.util.regex.Pattern; import org.icatproject.ids.v3.enums.RequestType; @@ -45,6 +47,8 @@ public abstract class RequestHandlerBase { protected StorageUnit storageUnit; protected RequestType requestType; + protected ExecutorService threadPool; + /** * matches standard UUID format of 8-4-4-4-12 hexadecimal digits */ @@ -97,6 +101,8 @@ public void init() throws InternalException { var archiveStorage = propertyHandler.getArchiveStorage(); this.twoLevel = archiveStorage != null; + this.threadPool = Executors.newCachedThreadPool(); + //logger.info("RequestHandlerBase initialized"); } diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java 
b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 58a2703f..810290ac 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -24,6 +24,7 @@ import org.icatproject.ids.v3.handlers.GetServiceStatusHandler; import org.icatproject.ids.v3.handlers.GetSizeHandler; import org.icatproject.ids.v3.handlers.GetStatusHandler; +import org.icatproject.ids.v3.handlers.IsPreparedHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,6 +58,7 @@ public RequestHandlerService() { this.registerHandler(new GetServiceStatusHandler()); this.registerHandler(new GetSizeHandler()); this.registerHandler(new GetStatusHandler()); + this.registerHandler(new IsPreparedHandler()); } diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 91f52f11..0e45739b 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS + GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS, ISPREPARED } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java new file mode 100644 index 00000000..c0faac00 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java @@ -0,0 +1,200 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import 
java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.locks.ReentrantLock; + +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.PreparedV3; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +public class IsPreparedHandler extends RequestHandlerBase { + + public IsPreparedHandler() { + super(new StorageUnit[]{StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.ISPREPARED); + } + + class PreparedStatus { + public ReentrantLock lock = new ReentrantLock(); + public Long fromDfElement; + public Future future; + public Long fromDsElement; + } + + private Map preparedStatusMap = new ConcurrentHashMap<>(); + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, NotImplementedException { + + long start = System.currentTimeMillis(); + + String preparedId = 
parameters.get("preparedId").getString(); + String ip = parameters.get("ip").getString(); + + logger.info(String.format("New webservice request: isPrepared preparedId=%s", preparedId)); + + // Validate + validateUUID("preparedId", preparedId); + + // Do it + boolean prepared = true; + + PreparedV3 preparedJson; + try (InputStream stream = Files.newInputStream(preparedDir.resolve(preparedId))) { + preparedJson = unpack(stream); + } catch (NoSuchFileException e) { + throw new NotFoundException("The preparedId " + preparedId + " is not known"); + } catch (IOException e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + PreparedStatus status = preparedStatusMap.computeIfAbsent(preparedId, k -> new PreparedStatus()); + + if (!status.lock.tryLock()) { + logger.debug("Lock held for evaluation of isPrepared for preparedId {}", preparedId); + return new ValueContainer(false); + } + try { + Future future = status.future; + if (future != null) { + if (future.isDone()) { + try { + future.get(); + } catch (ExecutionException e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } catch (InterruptedException e) { + // Ignore + } finally { + status.future = null; + } + } else { + logger.debug("Background process still running for preparedId {}", preparedId); + return new ValueContainer(false); + } + } + + var serviceProvider = ServiceProvider.getInstance(); + DataSelectionV3Base dataSelection = this.getDataSelection(preparedJson.dsInfos, preparedJson.dfInfos, preparedJson.emptyDatasets); + + if (storageUnit == StorageUnit.DATASET) { + Collection toCheck = status.fromDsElement == null ? 
dataSelection.getPrimaryDataInfos() + : preparedJson.dsInfos.tailMap(status.fromDsElement).values(); + logger.debug("Will check online status of {} entries", toCheck.size()); + for (DataInfoBase dsInfo : toCheck) { + serviceProvider.getFsm().checkFailure(dsInfo.getId()); + if (dataSelection.restoreIfOffline(dsInfo)) { + prepared = false; + status.fromDsElement = dsInfo.getId(); + toCheck = preparedJson.dsInfos.tailMap(status.fromDsElement).values(); + logger.debug("Will check in background status of {} entries", toCheck.size()); + status.future = threadPool.submit(new RunPrepDataInfoCheck(toCheck, dataSelection)); + break; + } + } + if (prepared) { + toCheck = status.fromDsElement == null ? Collections.emptySet() + : preparedJson.dsInfos.headMap(status.fromDsElement).values(); + logger.debug("Will check finally online status of {} entries", toCheck.size()); + for (DataInfoBase dsInfo : toCheck) { + serviceProvider.getFsm().checkFailure(dsInfo.getId()); + if (dataSelection.restoreIfOffline(dsInfo)) { + prepared = false; + } + } + } + } else if (storageUnit == StorageUnit.DATAFILE) { + SortedMap toCheck = status.fromDfElement == null ? preparedJson.dfInfos + : preparedJson.dfInfos.tailMap(status.fromDfElement); + logger.debug("Will check online status of {} entries", toCheck.size()); + for (DataInfoBase dfInfo : toCheck.values()) { + serviceProvider.getFsm().checkFailure(dfInfo.getId()); + if (dataSelection.restoreIfOffline(dfInfo)) { + prepared = false; + status.fromDfElement = dfInfo.getId(); + toCheck = preparedJson.dfInfos.tailMap(status.fromDfElement); + logger.debug("Will check in background status of {} entries", toCheck.size()); + status.future = threadPool.submit(new RunPrepDataInfoCheck(toCheck.values(), dataSelection)); + break; + } + } + if (prepared) { + toCheck = status.fromDfElement == null ? 
new TreeMap<>() + : preparedJson.dfInfos.headMap(status.fromDfElement); + logger.debug("Will check finally online status of {} entries", toCheck.size()); + for (DataInfoBase dfInfo : toCheck.values()) { + serviceProvider.getFsm().checkFailure(dfInfo.getId()); + if (dataSelection.restoreIfOffline(dfInfo)) { + prepared = false; + } + } + } + } + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("preparedId", preparedId); + gen.writeEnd(); + } + String body = baos.toString(); + serviceProvider.getTransmitter().processMessage("isPrepared", ip, body, start); + } + + return new ValueContainer(prepared); + + } finally { + status.lock.unlock(); + } + } + + public class RunPrepDataInfoCheck implements Callable { + + private Collection toCheck; + private DataSelectionV3Base dataselection; + + public RunPrepDataInfoCheck(Collection toCheck, DataSelectionV3Base dataSelection) { + this.toCheck = toCheck; + this.dataselection = dataSelection; + } + + @Override + public Void call() throws Exception { + for(DataInfoBase dataInfo : toCheck) { + ServiceProvider.getInstance().getFsm().checkFailure(dataInfo.getId()); + dataselection.restoreIfOffline(dataInfo); + } + return null; + } + + } +} \ No newline at end of file From 310eb3f75fc896bb3a557bc3ce835ccbf7df5084 Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Wed, 28 Feb 2024 07:52:01 +0100 Subject: [PATCH 22/92] IsPreparedHandler #2 --- .../java/org/icatproject/ids/IdsService.java | 2 - .../ids/v3/DataSelectionFactory.java | 12 ++- .../DataSelectionForSingleLevelStorage.java | 18 ++-- .../DataSelectionForStorageUnitDatafile.java | 16 ++-- .../DataSelectionForStorageUnitDataset.java | 16 ++-- .../ids/v3/DataSelectionV3Base.java | 93 +++++++++++++++++-- .../ids/v3/RequestHandlerBase.java | 9 +- .../ids/v3/handlers/GetStatusHandler.java | 6 +- 
.../ids/v3/handlers/IsPreparedHandler.java | 85 +---------------- .../org/icatproject/ids/v3/helper/SO.java | 1 - 10 files changed, 132 insertions(+), 126 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index e90c6c2a..19a6b424 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -439,8 +439,6 @@ public boolean isPrepared(@Context HttpServletRequest request, @QueryParam("prep parameters.put("ip", new ValueContainer(request.getRemoteAddr())); return this.requestService.handle(RequestType.ISPREPARED, parameters).getBool(); - - //return idsBean.isPrepared(preparedId, request.getRemoteAddr()); } /** diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java index 513b693f..a468eeca 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -8,6 +8,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import org.icatproject.Datafile; import org.icatproject.Dataset; @@ -68,7 +70,7 @@ protected static DataSelectionV3Base get(String userSessionId, String investigat - protected static DataSelectionV3Base get(Map dsInfos, Map dfInfos, Set emptyDatasets, RequestType requestType) throws InternalException { + protected static DataSelectionV3Base get(SortedMap dsInfos, SortedMap dfInfos, Set emptyDatasets, RequestType requestType) throws InternalException { List dsids = new ArrayList(dsInfos.keySet()); List dfids = new ArrayList(); var dataFileInfos = new HashMap(); @@ -93,7 +95,7 @@ private DataSelectionFactory() throws InternalException logger.info("### Constructing finished"); } - private DataSelectionV3Base createSelection(Map dsInfos, Map dfInfos, Set emptyDatasets, List invids2, List 
dsids, List dfids, RequestType requestType) throws InternalException { + private DataSelectionV3Base createSelection(SortedMap dsInfos, SortedMap dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) throws InternalException { StorageUnit storageUnit = this.propertyHandler.getStorageUnit(); @@ -145,11 +147,11 @@ private DataSelectionV3Base getSelection( String userSessionId, String investiga private DataSelectionV3Base prepareFromIds(boolean dfWanted, boolean dsWanted, List dfids, List dsids, List invids, String userSessionId, Session restSessionToUse, Session userRestSession, RequestType requestType) throws NotFoundException, InsufficientPrivilegesException, InternalException, BadRequestException { - var dsInfos = new HashMap(); + var dsInfos = new TreeMap(); var emptyDatasets = new HashSet(); - var dfInfos = new HashMap(); + var dfInfos = new TreeMap(); if (dfWanted) { //redundant ? - dfInfos = new HashMap(); + dfInfos = new TreeMap(); } try { diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java index 9a98276d..3fd5a99e 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForSingleLevelStorage.java @@ -1,12 +1,10 @@ package org.icatproject.ids.v3; -import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; -import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.DeferredOp; @@ -15,7 +13,7 @@ public class DataSelectionForSingleLevelStorage extends DataSelectionV3Base { - protected DataSelectionForSingleLevelStorage(Map dsInfos, Map dfInfos, + 
protected DataSelectionForSingleLevelStorage(SortedMap dsInfos, SortedMap dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); @@ -30,8 +28,8 @@ protected void scheduleTask(DeferredOp operation) throws NotImplementedException @Override - public Collection getPrimaryDataInfos() { - return new ArrayList(); + public SortedMap getPrimaryDataInfos() { + return new TreeMap(); } @@ -42,6 +40,12 @@ protected boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalExce } + @Override + public boolean isPrepared(String preparedId) throws InternalException { + return true; + } + + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java index 8a2dd1e1..0558e6b1 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDatafile.java @@ -1,11 +1,9 @@ package org.icatproject.ids.v3; -import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.Set; +import java.util.SortedMap; -import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.DeferredOp; @@ -14,7 +12,7 @@ public class DataSelectionForStorageUnitDatafile extends DataSelectionV3Base { - protected DataSelectionForStorageUnitDatafile(Map dsInfos, Map dfInfos, + protected DataSelectionForStorageUnitDatafile(SortedMap dsInfos, SortedMap dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); @@ -30,13 +28,19 @@ protected void scheduleTask(DeferredOp operation) 
throws NotImplementedException } @Override - public Collection getPrimaryDataInfos() { - return this.dfInfos.values(); + public SortedMap getPrimaryDataInfos() { + return this.dfInfos; } @Override protected boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalException { return ServiceProvider.getInstance().getMainStorage().exists(dataInfo.getLocation()); } + + + @Override + public boolean isPrepared(String preparedId) throws InternalException { + return areDataInfosPrepared(preparedId); + } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java index 8a31195e..d783a213 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionForStorageUnitDataset.java @@ -1,11 +1,9 @@ package org.icatproject.ids.v3; -import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.Set; +import java.util.SortedMap; -import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.enums.DeferredOp; @@ -15,7 +13,7 @@ public class DataSelectionForStorageUnitDataset extends DataSelectionV3Base { - protected DataSelectionForStorageUnitDataset(Map dsInfos, Map dfInfos, + protected DataSelectionForStorageUnitDataset(SortedMap dsInfos, SortedMap dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { super(dsInfos, dfInfos, emptyDatasets, invids2, dsids, dfids, requestType); @@ -31,8 +29,8 @@ protected void scheduleTask(DeferredOp operation) throws NotImplementedException } @Override - public Collection getPrimaryDataInfos() { - return this.dsInfos.values(); + public SortedMap getPrimaryDataInfos() { + return this.dsInfos; } @Override @@ 
-43,6 +41,12 @@ protected boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalExce return emptyDatasets.contains(dataInfo.getId()) || ServiceProvider.getInstance().getMainStorage().exists(dsInfo); } + + + @Override + public boolean isPrepared(String preparedId) throws InternalException { + return this.areDataInfosPrepared(preparedId); + } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java index cf076bd7..c521c557 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -7,6 +7,13 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.SortedMap; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.locks.ReentrantLock; import org.icatproject.ids.Status; import org.icatproject.ids.exceptions.BadRequestException; @@ -16,19 +23,33 @@ import org.icatproject.ids.v3.enums.DeferredOp; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataInfoBase; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class DataSelectionV3Base { - protected Map dsInfos; - protected Map dfInfos; + protected final static Logger logger = LoggerFactory.getLogger(DataSelectionV3Base.class); + + protected SortedMap dsInfos; + protected SortedMap dfInfos; protected Set emptyDatasets; protected List invids; protected List dsids; protected List dfids; protected RequestType requestType; protected HashMap requestTypeToDeferredOpMapping; + protected ExecutorService threadPool; + + private Map preparedStatusMap = new ConcurrentHashMap<>(); - protected DataSelectionV3Base(Map dsInfos, Map dfInfos, Set emptyDatasets, 
List invids2, List dsids, List dfids, RequestType requestType) { + class PreparedStatus { + public ReentrantLock lock = new ReentrantLock(); + public Long fromElement; + public Future future; + + } + + protected DataSelectionV3Base(SortedMap dsInfos, SortedMap dfInfos, Set emptyDatasets, List invids2, List dsids, List dfids, RequestType requestType) { this.dsInfos = dsInfos; this.dfInfos = dfInfos; @@ -41,16 +62,20 @@ protected DataSelectionV3Base(Map dsInfos, Map(); this.requestTypeToDeferredOpMapping.put(RequestType.ARCHIVE, DeferredOp.ARCHIVE); this.requestTypeToDeferredOpMapping.put(RequestType.GETDATA, null); + + this.threadPool = Executors.newCachedThreadPool(); } protected abstract void scheduleTask(DeferredOp operation) throws NotImplementedException, InternalException; + public abstract boolean isPrepared(String preparedId) throws InternalException; + /** * To get the DataInfos whom are primary worked on, depending on StorageUnit * @return */ - public abstract Collection getPrimaryDataInfos(); + public abstract SortedMap getPrimaryDataInfos(); protected abstract boolean existsInMainStorage(DataInfoBase dataInfo) throws InternalException; @@ -123,7 +148,7 @@ public Status getStatus() throws InternalException { Set restoring = serviceProvider.getFsm().getRestoring(); Set maybeOffline = serviceProvider.getFsm().getMaybeOffline(); - for (DataInfoBase dataInfo : this.getPrimaryDataInfos()) { + for (DataInfoBase dataInfo : this.getPrimaryDataInfos().values()) { serviceProvider.getFsm().checkFailure(dataInfo.getId()); if (restoring.contains(dataInfo)) { status = Status.RESTORING; @@ -155,7 +180,7 @@ public boolean restoreIfOffline(DataInfoBase dataInfo) throws InternalException public void checkOnline()throws InternalException, DataNotOnlineException { boolean maybeOffline = false; - for (DataInfoBase dfInfo : this.getPrimaryDataInfos()) { + for (DataInfoBase dfInfo : this.getPrimaryDataInfos().values()) { if (this.restoreIfOffline(dfInfo)) { maybeOffline = true; 
} @@ -166,4 +191,60 @@ public void checkOnline()throws InternalException, DataNotOnlineException { } } + protected boolean areDataInfosPrepared(String preparedId) throws InternalException { + boolean prepared = true; + var serviceProvider = ServiceProvider.getInstance(); + PreparedStatus status = preparedStatusMap.computeIfAbsent(preparedId, k -> new PreparedStatus()); + + Collection toCheck = status.fromElement == null ? this.getPrimaryDataInfos().values() + : this.getPrimaryDataInfos().tailMap(status.fromElement).values(); + logger.debug("Will check online status of {} entries", toCheck.size()); + for (DataInfoBase dataInfo : toCheck) { + serviceProvider.getFsm().checkFailure(dataInfo.getId()); + if (this.restoreIfOffline(dataInfo)) { + prepared = false; + status.fromElement = dataInfo.getId(); + toCheck = this.getPrimaryDataInfos().tailMap(status.fromElement).values(); + logger.debug("Will check in background status of {} entries", toCheck.size()); + status.future = threadPool.submit(new RunPrepDataInfoCheck(toCheck, this)); + break; + } + } + if (prepared) { + toCheck = status.fromElement == null ? 
Collections.emptySet() + : this.getPrimaryDataInfos().headMap(status.fromElement).values(); + logger.debug("Will check finally online status of {} entries", toCheck.size()); + for (DataInfoBase dataInfo : toCheck) { + serviceProvider.getFsm().checkFailure(dataInfo.getId()); + if (this.restoreIfOffline(dataInfo)) { + prepared = false; + } + } + } + + return prepared; + } + + + private class RunPrepDataInfoCheck implements Callable { + + private Collection toCheck; + private DataSelectionV3Base dataselection; + + public RunPrepDataInfoCheck(Collection toCheck, DataSelectionV3Base dataSelection) { + this.toCheck = toCheck; + this.dataselection = dataSelection; + } + + @Override + public Void call() throws Exception { + for(DataInfoBase dataInfo : toCheck) { + ServiceProvider.getInstance().getFsm().checkFailure(dataInfo.getId()); + dataselection.restoreIfOffline(dataInfo); + } + return null; + } + + } + } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index da79af04..b0a035d2 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -6,12 +6,9 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.regex.Pattern; import org.icatproject.ids.v3.enums.RequestType; @@ -47,8 +44,6 @@ public abstract class RequestHandlerBase { protected StorageUnit storageUnit; protected RequestType requestType; - protected ExecutorService threadPool; - /** * matches standard UUID format of 8-4-4-4-12 hexadecimal digits */ @@ -72,7 +67,7 @@ public RequestType getRequestType() { return this.requestType; } - public DataSelectionV3Base getDataSelection(Map dsInfos, Map 
dfInfos, Set emptyDatasets) throws InternalException { + public DataSelectionV3Base getDataSelection(SortedMap dsInfos, SortedMap dfInfos, Set emptyDatasets) throws InternalException { return DataSelectionFactory.get(dsInfos, dfInfos, emptyDatasets, this.getRequestType()); } @@ -101,8 +96,6 @@ public void init() throws InternalException { var archiveStorage = propertyHandler.getArchiveStorage(); this.twoLevel = archiveStorage != null; - this.threadPool = Executors.newCachedThreadPool(); - //logger.info("RequestHandlerBase initialized"); } diff --git a/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java index f1aa2f99..0d702fdb 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/GetStatusHandler.java @@ -6,8 +6,8 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.util.HashMap; -import java.util.Map; import java.util.Set; +import java.util.SortedMap; import org.icatproject.IcatException_Exception; import org.icatproject.ids.Status; @@ -77,8 +77,8 @@ private String getStatus(String preparedId, String ip) throw new InternalException(e.getClass() + " " + e.getMessage()); } - final Map dfInfos = prepared.dfInfos; - final Map dsInfos = prepared.dsInfos; + final SortedMap dfInfos = prepared.dfInfos; + final SortedMap dsInfos = prepared.dsInfos; Set emptyDatasets = prepared.emptyDatasets; // Do it diff --git a/src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java index c0faac00..65b2ff8d 100644 --- a/src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java +++ b/src/main/java/org/icatproject/ids/v3/handlers/IsPreparedHandler.java @@ -5,13 +5,8 @@ import java.io.InputStream; import java.nio.file.Files; import java.nio.file.NoSuchFileException; -import java.util.Collection; -import 
java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -30,7 +25,6 @@ import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.RequestType; -import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.ValueContainer; import jakarta.json.Json; @@ -44,9 +38,9 @@ public IsPreparedHandler() { class PreparedStatus { public ReentrantLock lock = new ReentrantLock(); - public Long fromDfElement; + public Long fromElement; public Future future; - public Long fromDsElement; + } private Map preparedStatusMap = new ConcurrentHashMap<>(); @@ -106,59 +100,7 @@ public ValueContainer handle(HashMap parameters) var serviceProvider = ServiceProvider.getInstance(); DataSelectionV3Base dataSelection = this.getDataSelection(preparedJson.dsInfos, preparedJson.dfInfos, preparedJson.emptyDatasets); - if (storageUnit == StorageUnit.DATASET) { - Collection toCheck = status.fromDsElement == null ? dataSelection.getPrimaryDataInfos() - : preparedJson.dsInfos.tailMap(status.fromDsElement).values(); - logger.debug("Will check online status of {} entries", toCheck.size()); - for (DataInfoBase dsInfo : toCheck) { - serviceProvider.getFsm().checkFailure(dsInfo.getId()); - if (dataSelection.restoreIfOffline(dsInfo)) { - prepared = false; - status.fromDsElement = dsInfo.getId(); - toCheck = preparedJson.dsInfos.tailMap(status.fromDsElement).values(); - logger.debug("Will check in background status of {} entries", toCheck.size()); - status.future = threadPool.submit(new RunPrepDataInfoCheck(toCheck, dataSelection)); - break; - } - } - if (prepared) { - toCheck = status.fromDsElement == null ? 
Collections.emptySet() - : preparedJson.dsInfos.headMap(status.fromDsElement).values(); - logger.debug("Will check finally online status of {} entries", toCheck.size()); - for (DataInfoBase dsInfo : toCheck) { - serviceProvider.getFsm().checkFailure(dsInfo.getId()); - if (dataSelection.restoreIfOffline(dsInfo)) { - prepared = false; - } - } - } - } else if (storageUnit == StorageUnit.DATAFILE) { - SortedMap toCheck = status.fromDfElement == null ? preparedJson.dfInfos - : preparedJson.dfInfos.tailMap(status.fromDfElement); - logger.debug("Will check online status of {} entries", toCheck.size()); - for (DataInfoBase dfInfo : toCheck.values()) { - serviceProvider.getFsm().checkFailure(dfInfo.getId()); - if (dataSelection.restoreIfOffline(dfInfo)) { - prepared = false; - status.fromDfElement = dfInfo.getId(); - toCheck = preparedJson.dfInfos.tailMap(status.fromDfElement); - logger.debug("Will check in background status of {} entries", toCheck.size()); - status.future = threadPool.submit(new RunPrepDataInfoCheck(toCheck.values(), dataSelection)); - break; - } - } - if (prepared) { - toCheck = status.fromDfElement == null ? 
new TreeMap<>() - : preparedJson.dfInfos.headMap(status.fromDfElement); - logger.debug("Will check finally online status of {} entries", toCheck.size()); - for (DataInfoBase dfInfo : toCheck.values()) { - serviceProvider.getFsm().checkFailure(dfInfo.getId()); - if (dataSelection.restoreIfOffline(dfInfo)) { - prepared = false; - } - } - } - } + prepared = dataSelection.isPrepared(preparedId); if (serviceProvider.getLogSet().contains(CallType.INFO)) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); @@ -176,25 +118,4 @@ public ValueContainer handle(HashMap parameters) status.lock.unlock(); } } - - public class RunPrepDataInfoCheck implements Callable { - - private Collection toCheck; - private DataSelectionV3Base dataselection; - - public RunPrepDataInfoCheck(Collection toCheck, DataSelectionV3Base dataSelection) { - this.toCheck = toCheck; - this.dataselection = dataSelection; - } - - @Override - public Void call() throws Exception { - for(DataInfoBase dataInfo : toCheck) { - ServiceProvider.getInstance().getFsm().checkFailure(dataInfo.getId()); - dataselection.restoreIfOffline(dataInfo); - } - return null; - } - - } } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/helper/SO.java b/src/main/java/org/icatproject/ids/v3/helper/SO.java index fb01b92a..4d88d589 100644 --- a/src/main/java/org/icatproject/ids/v3/helper/SO.java +++ b/src/main/java/org/icatproject/ids/v3/helper/SO.java @@ -5,7 +5,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.List; import java.util.Map; import java.util.zip.ZipEntry; import java.util.zip.ZipException; From 9726812eb05c7b41220655d9c1261518801d497b Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Wed, 28 Feb 2024 09:06:49 +0100 Subject: [PATCH 23/92] IsReadOnlyHandler and IsTwoLevelHandler --- .../java/org/icatproject/ids/IdsService.java | 28 ++++++++++++-- .../ids/v3/RequestHandlerBase.java | 3 ++ .../ids/v3/RequestHandlerService.java | 4 
++ .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../ids/v3/handlers/IsReadOnlyHandler.java | 37 +++++++++++++++++++ .../ids/v3/handlers/IsTwoLevelHandler.java | 36 ++++++++++++++++++ 6 files changed, 105 insertions(+), 5 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/IsReadOnlyHandler.java create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/IsTwoLevelHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 19a6b424..039f2d90 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -446,14 +446,24 @@ public boolean isPrepared(@Context HttpServletRequest request, @QueryParam("prep * readOnly status of the server. * * @return true if readonly, else false + * @throws NotImplementedException + * @throws DataNotOnlineException + * @throws NotFoundException + * @throws InsufficientPrivilegesException + * @throws BadRequestException + * @throws InternalException * @summary isReadOnly * @statuscode 200 To indicate success */ @GET @Path("isReadOnly") @Produces(MediaType.TEXT_PLAIN) - public boolean isReadOnly(@Context HttpServletRequest request) { - return idsBean.isReadOnly(request.getRemoteAddr()); + public boolean isReadOnly(@Context HttpServletRequest request) throws InternalException, BadRequestException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException, NotImplementedException { + + var parameters = new HashMap(); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.ISREADONLY, parameters).getBool(); } /** @@ -461,14 +471,24 @@ public boolean isReadOnly(@Context HttpServletRequest request) { * storage. This returns the twoLevel status of the server. 
* * @return true if twoLevel, else false + * @throws NotImplementedException + * @throws DataNotOnlineException + * @throws NotFoundException + * @throws InsufficientPrivilegesException + * @throws BadRequestException + * @throws InternalException * @summary isTwoLevel * @statuscode 200 To indicate success */ @GET @Path("isTwoLevel") @Produces(MediaType.TEXT_PLAIN) - public boolean isTwoLevel(@Context HttpServletRequest request) { - return idsBean.isTwoLevel(request.getRemoteAddr()); + public boolean isTwoLevel(@Context HttpServletRequest request) throws InternalException, BadRequestException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException, NotImplementedException { + + var parameters = new HashMap(); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.ISTWOLEVEL, parameters).getBool(); } /** diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index b0a035d2..2044b645 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -43,6 +43,7 @@ public abstract class RequestHandlerBase { protected boolean twoLevel; protected StorageUnit storageUnit; protected RequestType requestType; + protected boolean readOnly; /** * matches standard UUID format of 8-4-4-4-12 hexadecimal digits @@ -96,6 +97,8 @@ public void init() throws InternalException { var archiveStorage = propertyHandler.getArchiveStorage(); this.twoLevel = archiveStorage != null; + this.readOnly = propertyHandler.getReadOnly(); + //logger.info("RequestHandlerBase initialized"); } diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 810290ac..f8ab1da2 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ 
b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -25,6 +25,8 @@ import org.icatproject.ids.v3.handlers.GetSizeHandler; import org.icatproject.ids.v3.handlers.GetStatusHandler; import org.icatproject.ids.v3.handlers.IsPreparedHandler; +import org.icatproject.ids.v3.handlers.IsReadOnlyHandler; +import org.icatproject.ids.v3.handlers.IsTwoLevelHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,6 +61,8 @@ public RequestHandlerService() { this.registerHandler(new GetSizeHandler()); this.registerHandler(new GetStatusHandler()); this.registerHandler(new IsPreparedHandler()); + this.registerHandler(new IsReadOnlyHandler()); + this.registerHandler(new IsTwoLevelHandler()); } diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 0e45739b..4de79b8e 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS, ISPREPARED + GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS, ISPREPARED, ISREADONLY, ISTWOLEVEL } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/IsReadOnlyHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/IsReadOnlyHandler.java new file mode 100644 index 00000000..22d0a5f0 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/IsReadOnlyHandler.java @@ -0,0 +1,37 @@ +package org.icatproject.ids.v3.handlers; + +import java.util.HashMap; + +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import 
org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.ValueContainer; + +public class IsReadOnlyHandler extends RequestHandlerBase { + + public IsReadOnlyHandler() { + super(new StorageUnit[]{StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.ISREADONLY); + + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, NotImplementedException { + + var serviceProvider = ServiceProvider.getInstance(); + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + serviceProvider.getTransmitter().processMessage("isReadOnly", parameters.get("ip").getString(), "{}", System.currentTimeMillis()); + } + return new ValueContainer(this.readOnly); + } +} \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/IsTwoLevelHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/IsTwoLevelHandler.java new file mode 100644 index 00000000..50a9e2af --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/IsTwoLevelHandler.java @@ -0,0 +1,36 @@ +package org.icatproject.ids.v3.handlers; + +import java.util.HashMap; + +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import 
org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.ValueContainer; + +public class IsTwoLevelHandler extends RequestHandlerBase { + + public IsTwoLevelHandler() { + super(new StorageUnit[]{StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.ISTWOLEVEL); + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, NotImplementedException { + + var serviceProvider = ServiceProvider.getInstance(); + + if (serviceProvider.getLogSet().contains(CallType.INFO)) { + serviceProvider.getTransmitter().processMessage("isTwoLevel", parameters.get("ip").getString(), "{}", System.currentTimeMillis()); + } + return new ValueContainer(twoLevel); + } +} \ No newline at end of file From 0f52dd7db1787204a2c6cdaa89253efb6c3b216f Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Thu, 29 Feb 2024 08:57:54 +0100 Subject: [PATCH 24/92] Prepare PrepareData Redesign #1 --- .../java/org/icatproject/ids/IdsBean.java | 47 ++++++++++--------- .../java/org/icatproject/ids/IdsService.java | 3 +- .../ids/v3/DataSelectionFactory.java | 4 +- .../icatproject/ids/v3/enums/RequestType.java | 2 +- .../icatproject/ids/PreparePackingTest.java | 9 ++-- 5 files changed, 36 insertions(+), 29 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 55bd7406..b841ad61 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -70,9 +70,12 @@ import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; import 
org.icatproject.ids.plugin.MainStorageInterface; +import org.icatproject.ids.v3.DataSelectionFactory; +import org.icatproject.ids.v3.DataSelectionV3Base; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.DeferredOp; +import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; @@ -123,16 +126,16 @@ public Void call() throws Exception { public class RestoreDfTask implements Callable { - private Set dfInfos; + private Collection dfInfos; - public RestoreDfTask(Set dfInfos) { + public RestoreDfTask(Collection dfInfos) { this.dfInfos = dfInfos; } @Override public Void call() throws Exception { - for (DataFileInfo dfInfo : dfInfos) { - DataSelection.restoreIfOffline(dfInfo); + for (DataInfoBase dfInfo : dfInfos) { + DataSelection.restoreIfOffline((DataFileInfo)dfInfo); } return null; } @@ -140,18 +143,18 @@ public Void call() throws Exception { } public class RestoreDsTask implements Callable { - private Collection dsInfos; + private Collection dsInfos; private Set emptyDs; - public RestoreDsTask(Collection dsInfos, Set emptyDs) { + public RestoreDsTask(Collection dsInfos, Set emptyDs) { this.dsInfos = dsInfos; this.emptyDs = emptyDs; } @Override public Void call() throws Exception { - for (DataSetInfo dsInfo : dsInfos) { - DataSelection.restoreIfOffline(dsInfo, emptyDs); + for (DataInfoBase dsInfo : dsInfos) { + DataSelection.restoreIfOffline((DataSetInfo)dsInfo, emptyDs); } return null; } @@ -246,15 +249,16 @@ static String getLocationFromDigest(long id, String locationWithHash, String key } } - static void pack(OutputStream stream, boolean zip, boolean compress, Map dsInfos, - Set dfInfos, Set emptyDatasets) { + static void pack(OutputStream stream, boolean zip, boolean compress, Map dsInfos, + Map dfInfos, Set emptyDatasets) { 
JsonGenerator gen = Json.createGenerator(stream); gen.writeStartObject(); gen.write("zip", zip); gen.write("compress", compress); gen.writeStartArray("dsInfo"); - for (DataSetInfo dsInfo : dsInfos.values()) { + for (DataInfoBase dataInfo : dsInfos.values()) { + var dsInfo = (DataSetInfo)dataInfo; logger.debug("dsInfo " + dsInfo); gen.writeStartObject().write("dsId", dsInfo.getId()) @@ -271,8 +275,9 @@ static void pack(OutputStream stream, boolean zip, boolean compress, Map dsInfos = dataSelection.getDsInfo(); + Map dsInfos = dataSelection.getDsInfo(); Set emptyDs = dataSelection.getEmptyDatasets(); - Set dfInfos = dataSelection.getDfInfo(); + Map dfInfos = dataSelection.getDfInfo(); if (storageUnit == StorageUnit.DATASET) { - for (DataSetInfo dsInfo : dsInfos.values()) { + for (DataInfoBase dsInfo : dsInfos.values()) { fsm.recordSuccess(dsInfo.getId()); } threadPool.submit(new RestoreDsTask(dsInfos.values(), emptyDs)); } else if (storageUnit == StorageUnit.DATAFILE) { - for (DataFileInfo dfInfo : dfInfos) { + for (DataInfoBase dfInfo : dfInfos.values()) { fsm.recordSuccess(dfInfo.getId()); } - threadPool.submit(new RestoreDfTask(dfInfos)); + threadPool.submit(new RestoreDfTask(dfInfos.values())); } if (dataSelection.mustZip()) { diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 039f2d90..5f946703 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -529,6 +529,7 @@ public String ping() { * @throws InsufficientPrivilegesException * @throws NotFoundException * @throws InternalException + * @throws NotImplementedException * @summary prepareData * @statuscode 200 To indicate success */ @@ -540,7 +541,7 @@ public String prepareData(@Context HttpServletRequest request, @FormParam("sessi @FormParam("investigationIds") String investigationIds, @FormParam("datasetIds") String datasetIds, @FormParam("datafileIds") String datafileIds, 
@FormParam("compress") boolean compress, @FormParam("zip") boolean zip) - throws BadRequestException, InsufficientPrivilegesException, NotFoundException, InternalException { + throws BadRequestException, InsufficientPrivilegesException, NotFoundException, InternalException, NotImplementedException { return idsBean.prepareData(sessionId, investigationIds, datasetIds, datafileIds, compress, zip, request.getRemoteAddr()); } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java index a468eeca..9183bdac 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionFactory.java @@ -62,7 +62,7 @@ public static DataSelectionFactory getInstance() throws InternalException { return instance; } - protected static DataSelectionV3Base get(String userSessionId, String investigationIds, String datasetIds, String datafileIds, RequestType requestType) + public static DataSelectionV3Base get(String userSessionId, String investigationIds, String datasetIds, String datafileIds, RequestType requestType) throws InternalException, BadRequestException, NotFoundException, InsufficientPrivilegesException, NotImplementedException { return DataSelectionFactory.getInstance().getSelection(userSessionId, investigationIds, datasetIds, datafileIds, requestType); @@ -362,7 +362,7 @@ private void createRequestTypeToReturnsMapping() throws InternalException { //this.requestTypeToReturnsMapping.put(RequestType.DELETE, Returns.DATASETS_AND_DATAFILES); this.requestTypeToReturnsMapping.put(RequestType.GETDATAFILEIDS, Returns.DATAFILES); this.requestTypeToReturnsMapping.put(RequestType.GETSIZE, Returns.DATASETS_AND_DATAFILES); - //this.requestTypeToReturnsMapping.put(RequestType.PREPAREDATA, Returns.DATASETS_AND_DATAFILES); + this.requestTypeToReturnsMapping.put(RequestType.PREPAREDATA, Returns.DATASETS_AND_DATAFILES); 
//this.requestTypeToReturnsMapping.put(RequestType.RESET, Returns.DATASETS_AND_DATAFILES); //this.requestTypeToReturnsMapping.put(RequestType.WRITE, Returns.DATASETS_AND_DATAFILES); this.requestTypeToReturnsMapping.put(RequestType.GETDATA, Returns.DATASETS_AND_DATAFILES); diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 4de79b8e..08297397 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,5 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS, ISPREPARED, ISREADONLY, ISTWOLEVEL + GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS, ISPREPARED, ISREADONLY, ISTWOLEVEL, PREPAREDATA } \ No newline at end of file diff --git a/src/test/java/org/icatproject/ids/PreparePackingTest.java b/src/test/java/org/icatproject/ids/PreparePackingTest.java index e92a6bf8..679bfbd6 100644 --- a/src/test/java/org/icatproject/ids/PreparePackingTest.java +++ b/src/test/java/org/icatproject/ids/PreparePackingTest.java @@ -18,6 +18,7 @@ import static org.junit.Assert.fail; import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; import org.junit.Test; @@ -27,17 +28,17 @@ public class PreparePackingTest { public void packAndUnpack() throws Exception { boolean zip = true; boolean compress = false; - Map dsInfos = new HashMap<>(); - Set dfInfos = new HashSet<>(); + Map dsInfos = new HashMap<>(); + Map dfInfos = new HashMap<>(); Set emptyDatasets = new HashSet<>(); long dsid1 = 17L; long dsid2 = 18L; long invId = 15L; long facilityId = 45L; - dfInfos.add(new DataFileInfo(5L, "dfName", "dfLocation", "createId", "modId", dsid1)); + dfInfos.put(5L, new 
DataFileInfo(5L, "dfName", "dfLocation", "createId", "modId", dsid1)); - dfInfos.add(new DataFileInfo(51L, "dfName2", null, "createId", "modId", dsid1)); + dfInfos.put(51L, new DataFileInfo(51L, "dfName2", null, "createId", "modId", dsid1)); dsInfos.put(dsid1, new DataSetInfo(dsid1, "dsName", "dsLocation", invId, "invName", "visitId", facilityId, "facilityName")); From dfced29c90c05defc05666ed675dec49f533e6dc Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Thu, 29 Feb 2024 16:00:00 +0100 Subject: [PATCH 25/92] PrepareDataHandler #1 --- .../java/org/icatproject/ids/IdsBean.java | 79 +++++++++---------- .../java/org/icatproject/ids/IdsService.java | 14 +++- .../ids/v3/DataSelectionV3Base.java | 35 +++++++- .../ids/integration/twodf/MiscTest.java | 5 +- .../util/client/TestingClient.java | 14 ++++ 5 files changed, 102 insertions(+), 45 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index b841ad61..273dc08c 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -72,6 +72,7 @@ import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.DataSelectionFactory; import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.DeferredOp; @@ -79,6 +80,7 @@ import org.icatproject.ids.v3.models.DataFileInfo; import org.icatproject.ids.v3.models.DataInfoBase; import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.ValueContainer; import org.icatproject.utils.IcatSecurity; @Stateless @@ -124,42 +126,25 @@ public Void call() throws Exception { } - public class RestoreDfTask implements Callable { + public class RestoreDataInfoTask implements Callable { + private Collection dataInfos; + private 
DataSelectionV3Base dataSelection; - private Collection dfInfos; - - public RestoreDfTask(Collection dfInfos) { - this.dfInfos = dfInfos; + public RestoreDataInfoTask(Collection dataInfos, DataSelectionV3Base dataSelection) { + this.dataInfos = dataInfos; + this.dataSelection = dataSelection; } @Override public Void call() throws Exception { - for (DataInfoBase dfInfo : dfInfos) { - DataSelection.restoreIfOffline((DataFileInfo)dfInfo); + for (DataInfoBase dfInfo : dataInfos) { + dataSelection.restoreIfOffline(dfInfo); } return null; } } - public class RestoreDsTask implements Callable { - private Collection dsInfos; - private Set emptyDs; - - public RestoreDsTask(Collection dsInfos, Set emptyDs) { - this.dsInfos = dsInfos; - this.emptyDs = emptyDs; - } - - @Override - public Void call() throws Exception { - for (DataInfoBase dsInfo : dsInfos) { - DataSelection.restoreIfOffline((DataSetInfo)dsInfo, emptyDs); - } - return null; - } - } - private static Boolean inited = false; private static String key; @@ -719,11 +704,19 @@ public boolean isTwoLevel(String ip) { return twoLevel; } - public String prepareData(String sessionId, String investigationIds, String datasetIds, String datafileIds, - boolean compress, boolean zip, String ip) + public String prepareData(HashMap parameters) throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, NotImplementedException { long start = System.currentTimeMillis(); + var serviceProvider = ServiceProvider.getInstance(); + + String sessionId = parameters.get("sessionId").getString(); + String investigationIds = parameters.get("investigationIds").getString(); + String datasetIds = parameters.get("datasetIds").getString(); + String datafileIds = parameters.get("datafileIds").getString(); + boolean compress = parameters.get("compress").getBool(); + boolean zip = parameters.get("zip").getBool(); + String ip = parameters.get("ip").getString(); // Log and validate logger.info("New webservice 
request: prepareData " + "investigationIds='" + investigationIds + "' " @@ -742,18 +735,22 @@ public String prepareData(String sessionId, String investigationIds, String data Set emptyDs = dataSelection.getEmptyDatasets(); Map dfInfos = dataSelection.getDfInfo(); - if (storageUnit == StorageUnit.DATASET) { - for (DataInfoBase dsInfo : dsInfos.values()) { - fsm.recordSuccess(dsInfo.getId()); - } - threadPool.submit(new RestoreDsTask(dsInfos.values(), emptyDs)); + dataSelection.restoreDataInfos(); - } else if (storageUnit == StorageUnit.DATAFILE) { - for (DataInfoBase dfInfo : dfInfos.values()) { - fsm.recordSuccess(dfInfo.getId()); - } - threadPool.submit(new RestoreDfTask(dfInfos.values())); - } + // if (storageUnit == StorageUnit.DATASET) { + // logger.info("#### prepareData: 2" ); + // for (DataInfoBase dsInfo : dataInfos) { + // serviceProvider.getFsm().recordSuccess(dsInfo.getId()); + // } + // dataSelection.threadPool.submit(new RestoreDataInfoTask(dsInfos.values(), dataSelection)); + + // } else if (storageUnit == StorageUnit.DATAFILE) { + // logger.info("#### prepareData: 3" ); + // for (DataInfoBase dfInfo : dataInfos) { + // serviceProvider.getFsm().recordSuccess(dfInfo.getId()); + // } + // dataSelection.threadPool.submit(new RestoreDataInfoTask(dfInfos.values(), dataSelection)); + // } if (dataSelection.mustZip()) { zip = true; @@ -768,17 +765,17 @@ public String prepareData(String sessionId, String investigationIds, String data logger.debug("preparedId is " + preparedId); - if (logSet.contains(CallType.PREPARE)) { + if (serviceProvider.getLogSet().contains(CallType.PREPARE)) { try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("userName", icat.getUserName(sessionId)); + gen.write("userName", serviceProvider.getIcat().getUserName(sessionId)); addIds(gen, investigationIds, datasetIds, datafileIds); gen.write("preparedId", preparedId); gen.writeEnd(); } 
String body = baos.toString(); - transmitter.processMessage("prepareData", ip, body, start); + serviceProvider.getTransmitter().processMessage("prepareData", ip, body, start); } catch (IcatException_Exception e) { logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); } diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 5f946703..b38d62f9 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -542,8 +542,18 @@ public String prepareData(@Context HttpServletRequest request, @FormParam("sessi @FormParam("datafileIds") String datafileIds, @FormParam("compress") boolean compress, @FormParam("zip") boolean zip) throws BadRequestException, InsufficientPrivilegesException, NotFoundException, InternalException, NotImplementedException { - return idsBean.prepareData(sessionId, investigationIds, datasetIds, datafileIds, compress, zip, - request.getRemoteAddr()); + + + var parameters = new HashMap(); + parameters.put("sessionId", new ValueContainer(sessionId)); + parameters.put("investigationIds", new ValueContainer(investigationIds)); + parameters.put("datasetIds", new ValueContainer(datasetIds)); + parameters.put("datafileIds", new ValueContainer(datafileIds)); + parameters.put("compress", new ValueContainer(compress)); + parameters.put("zip", new ValueContainer(zip)); + parameters.put("ip", new ValueContainer(request.getRemoteAddr())); + + return idsBean.prepareData(parameters); } /** diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java index c521c557..ad04148e 100644 --- a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -15,7 +15,9 @@ import java.util.concurrent.Future; import java.util.concurrent.locks.ReentrantLock; +import 
org.icatproject.ids.IdsBean.RestoreDataInfoTask; import org.icatproject.ids.Status; +import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; @@ -38,7 +40,7 @@ public abstract class DataSelectionV3Base { protected List dfids; protected RequestType requestType; protected HashMap requestTypeToDeferredOpMapping; - protected ExecutorService threadPool; + public ExecutorService threadPool; private Map preparedStatusMap = new ConcurrentHashMap<>(); @@ -191,6 +193,18 @@ public void checkOnline()throws InternalException, DataNotOnlineException { } } + + public void restoreDataInfos() { + + var dataInfos = this.getPrimaryDataInfos().values(); + if(!dataInfos.isEmpty()) { + for (DataInfoBase dataInfo : dataInfos) { + ServiceProvider.getInstance().getFsm().recordSuccess(dataInfo.getId()); + } + this.threadPool.submit(new RestoreDataInfoTask(dataInfos, this)); + } + } + protected boolean areDataInfosPrepared(String preparedId) throws InternalException { boolean prepared = true; var serviceProvider = ServiceProvider.getInstance(); @@ -247,4 +261,23 @@ public Void call() throws Exception { } + public class RestoreDataInfoTask implements Callable { + private Collection dataInfos; + private DataSelectionV3Base dataSelection; + + public RestoreDataInfoTask(Collection dataInfos, DataSelectionV3Base dataSelection) { + this.dataInfos = dataInfos; + this.dataSelection = dataSelection; + } + + @Override + public Void call() throws Exception { + for (DataInfoBase dfInfo : dataInfos) { + dataSelection.restoreIfOffline(dfInfo); + } + return null; + } + + } + } \ No newline at end of file diff --git a/src/test/java/org/icatproject/ids/integration/twodf/MiscTest.java b/src/test/java/org/icatproject/ids/integration/twodf/MiscTest.java index 482ad9d7..011de0ad 100644 --- 
a/src/test/java/org/icatproject/ids/integration/twodf/MiscTest.java +++ b/src/test/java/org/icatproject/ids/integration/twodf/MiscTest.java @@ -43,7 +43,10 @@ public void correctBehaviourNoOffsetTest() throws Exception { String preparedId = testingClient.prepareData(sessionId, new DataSelection().addDatafile(datafileIds.get(0)), Flag.NONE, 200); - assertFalse(testingClient.getServiceStatus(sessionId, 200).getOpItems().isEmpty()); + var status = testingClient.getServiceStatus(sessionId, 200); + //System.out.println("### twodf.MiscTest.correctBehaviourNoOffsetTest - status: " + status.toString()); + + assertFalse(status.getOpItems().isEmpty()); while (!testingClient.isPrepared(preparedId, 200)) { Thread.sleep(1000); diff --git a/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java b/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java index fa5d6d28..11f5c375 100644 --- a/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java +++ b/src/test/java/org/icatproject/ids/integration/util/client/TestingClient.java @@ -101,6 +101,20 @@ void storeFailure(Long id) { failures.add(id); } + @Override + public String toString() { + String result = "lockCount: " + this.lockCount + ", "; + result += "lockedIDs: " + lockedDs.size() + "\n"; + for(long lockedId : lockedDs) result += "\t" + lockedId + "\n"; + result += "opItems: " + opItems.size() + "\n"; + for(String key : opItems.keySet()) result += "\t" + key + ": " + opItems.get(key) + "\n"; + result += "failures: " + failures.size() + "\n"; + for(long fail : failures) result += " " + fail; + result += "\n"; + + return result; + } + } public enum Status { From e17e54ddefe9015c732997dc06989ff383ab7f7b Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Fri, 1 Mar 2024 09:33:26 +0100 Subject: [PATCH 26/92] Weird behavior in IdsService.prepareData() --- .../java/org/icatproject/ids/IdsBean.java | 81 +------------- .../java/org/icatproject/ids/IdsService.java | 10 +- 
.../ids/v3/RequestHandlerBase.java | 54 +++++++++ .../ids/v3/RequestHandlerService.java | 5 +- .../ids/v3/handlers/PrepareDataHandler.java | 105 ++++++++++++++++++ 5 files changed, 175 insertions(+), 80 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/PrepareDataHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index 273dc08c..a8dda5c0 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -72,6 +72,7 @@ import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.DataSelectionFactory; import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.RequestHandlerService; import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; @@ -704,84 +705,10 @@ public boolean isTwoLevel(String ip) { return twoLevel; } - public String prepareData(HashMap parameters) - throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, NotImplementedException { + public String prepareData(HashMap parameters, RequestHandlerService requestService) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, NotImplementedException, DataNotOnlineException { - long start = System.currentTimeMillis(); - var serviceProvider = ServiceProvider.getInstance(); - - String sessionId = parameters.get("sessionId").getString(); - String investigationIds = parameters.get("investigationIds").getString(); - String datasetIds = parameters.get("datasetIds").getString(); - String datafileIds = parameters.get("datafileIds").getString(); - boolean compress = parameters.get("compress").getBool(); - boolean zip = parameters.get("zip").getBool(); - String ip = parameters.get("ip").getString(); - - // Log and validate - 
logger.info("New webservice request: prepareData " + "investigationIds='" + investigationIds + "' " - + "datasetIds='" + datasetIds + "' " + "datafileIds='" + datafileIds + "' " + "compress='" + compress - + "' " + "zip='" + zip + "'"); - - validateUUID("sessionId", sessionId); - - final DataSelectionV3Base dataSelection = DataSelectionFactory.get(sessionId, - investigationIds, datasetIds, datafileIds, RequestType.PREPAREDATA); - - // Do it - String preparedId = UUID.randomUUID().toString(); - - Map dsInfos = dataSelection.getDsInfo(); - Set emptyDs = dataSelection.getEmptyDatasets(); - Map dfInfos = dataSelection.getDfInfo(); - - dataSelection.restoreDataInfos(); - - // if (storageUnit == StorageUnit.DATASET) { - // logger.info("#### prepareData: 2" ); - // for (DataInfoBase dsInfo : dataInfos) { - // serviceProvider.getFsm().recordSuccess(dsInfo.getId()); - // } - // dataSelection.threadPool.submit(new RestoreDataInfoTask(dsInfos.values(), dataSelection)); - - // } else if (storageUnit == StorageUnit.DATAFILE) { - // logger.info("#### prepareData: 3" ); - // for (DataInfoBase dfInfo : dataInfos) { - // serviceProvider.getFsm().recordSuccess(dfInfo.getId()); - // } - // dataSelection.threadPool.submit(new RestoreDataInfoTask(dfInfos.values(), dataSelection)); - // } - - if (dataSelection.mustZip()) { - zip = true; - } - - logger.debug("Writing to " + preparedDir.resolve(preparedId)); - try (OutputStream stream = new BufferedOutputStream(Files.newOutputStream(preparedDir.resolve(preparedId)))) { - pack(stream, zip, compress, dsInfos, dfInfos, emptyDs); - } catch (IOException e) { - throw new InternalException(e.getClass() + " " + e.getMessage()); - } - - logger.debug("preparedId is " + preparedId); - - if (serviceProvider.getLogSet().contains(CallType.PREPARE)) { - try { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { - gen.write("userName", 
serviceProvider.getIcat().getUserName(sessionId)); - addIds(gen, investigationIds, datasetIds, datafileIds); - gen.write("preparedId", preparedId); - gen.writeEnd(); - } - String body = baos.toString(); - serviceProvider.getTransmitter().processMessage("prepareData", ip, body, start); - } catch (IcatException_Exception e) { - logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); - } - } - - return preparedId; + return requestService.handle(RequestType.PREPAREDATA, parameters).getString(); } public Response put(InputStream body, String sessionId, String name, String datafileFormatIdString, diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index b38d62f9..f5b1161d 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -530,6 +530,7 @@ public String ping() { * @throws NotFoundException * @throws InternalException * @throws NotImplementedException + * @throws DataNotOnlineException * @summary prepareData * @statuscode 200 To indicate success */ @@ -541,7 +542,7 @@ public String prepareData(@Context HttpServletRequest request, @FormParam("sessi @FormParam("investigationIds") String investigationIds, @FormParam("datasetIds") String datasetIds, @FormParam("datafileIds") String datafileIds, @FormParam("compress") boolean compress, @FormParam("zip") boolean zip) - throws BadRequestException, InsufficientPrivilegesException, NotFoundException, InternalException, NotImplementedException { + throws BadRequestException, InsufficientPrivilegesException, NotFoundException, InternalException, NotImplementedException, DataNotOnlineException { var parameters = new HashMap(); @@ -553,7 +554,12 @@ public String prepareData(@Context HttpServletRequest request, @FormParam("sessi parameters.put("zip", new ValueContainer(zip)); parameters.put("ip", new ValueContainer(request.getRemoteAddr())); - return 
idsBean.prepareData(parameters); + // TODO: weird - calling idsBean.prepareData() works and tests succeeding. It just calls requestService.handle(). + // But calling requestService.handle() directly here, some tests will fail with {"code":"InternalException","message":"Restore failed"} + // Maybe because it is a Post Request @POST + + //return this.requestService.handle(RequestType.PREPAREDATA, parameters).getString(); + return idsBean.prepareData(parameters, this.requestService); } /** diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java index 2044b645..f84d1b66 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerBase.java @@ -1,11 +1,13 @@ package org.icatproject.ids.v3; import java.io.InputStream; +import java.io.OutputStream; import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; @@ -110,6 +112,58 @@ protected static void validateUUID(String thing, String id) throws BadRequestExc throw new BadRequestException("The " + thing + " parameter '" + id + "' is not a valid UUID"); } + protected static void pack(OutputStream stream, boolean zip, boolean compress, Map dsInfos, + Map dfInfos, Set emptyDatasets) { + JsonGenerator gen = Json.createGenerator(stream); + gen.writeStartObject(); + gen.write("zip", zip); + gen.write("compress", compress); + + gen.writeStartArray("dsInfo"); + for (DataInfoBase dataInfo : dsInfos.values()) { + var dsInfo = (DataSetInfo)dataInfo; + logger.debug("dsInfo " + dsInfo); + gen.writeStartObject().write("dsId", dsInfo.getId()) + + .write("dsName", dsInfo.getDsName()).write("facilityId", dsInfo.getFacilityId()) + .write("facilityName", dsInfo.getFacilityName()).write("invId", dsInfo.getInvId()) + 
.write("invName", dsInfo.getInvName()).write("visitId", dsInfo.getVisitId()); + if (dsInfo.getDsLocation() != null) { + gen.write("dsLocation", dsInfo.getDsLocation()); + } else { + gen.writeNull("dsLocation"); + } + gen.writeEnd(); + } + gen.writeEnd(); + + gen.writeStartArray("dfInfo"); + for (DataInfoBase dataInfo : dfInfos.values()) { + var dfInfo = (DataFileInfo)dataInfo; + DataInfoBase dsInfo = dsInfos.get(dfInfo.getDsId()); + gen.writeStartObject().write("dsId", dsInfo.getId()).write("dfId", dfInfo.getId()) + .write("dfName", dfInfo.getDfName()).write("createId", dfInfo.getCreateId()) + .write("modId", dfInfo.getModId()); + if (dfInfo.getDfLocation() != null) { + gen.write("dfLocation", dfInfo.getDfLocation()); + } else { + gen.writeNull("dfLocation"); + } + gen.writeEnd(); + + } + gen.writeEnd(); + + gen.writeStartArray("emptyDs"); + for (Long emptyDs : emptyDatasets) { + gen.write(emptyDs); + } + gen.writeEnd(); + + gen.writeEnd().close(); + + } + protected static PreparedV3 unpack(InputStream stream) throws InternalException { PreparedV3 prepared = new PreparedV3(); JsonObject pd; diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index f8ab1da2..1c1c2a3b 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -27,6 +27,7 @@ import org.icatproject.ids.v3.handlers.IsPreparedHandler; import org.icatproject.ids.v3.handlers.IsReadOnlyHandler; import org.icatproject.ids.v3.handlers.IsTwoLevelHandler; +import org.icatproject.ids.v3.handlers.PrepareDataHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -63,6 +64,7 @@ public RequestHandlerService() { this.registerHandler(new IsPreparedHandler()); this.registerHandler(new IsReadOnlyHandler()); this.registerHandler(new IsTwoLevelHandler()); + 
this.registerHandler(new PrepareDataHandler()); } @@ -76,8 +78,9 @@ private void registerHandler(RequestHandlerBase requestHandler) { public ValueContainer handle(RequestType requestType, HashMap parameters) throws InternalException, BadRequestException, InsufficientPrivilegesException, NotFoundException, DataNotOnlineException, NotImplementedException { - if(this.handlers.containsKey(requestType)) + if(this.handlers.containsKey(requestType)) { return this.handlers.get(requestType).handle(parameters); + } else throw new InternalException("No handler found for RequestType " + requestType + " and StorageUnit " + this.propertyHandler.getStorageUnit() + " in RequestHandlerService. Do you forgot to register?"); } diff --git a/src/main/java/org/icatproject/ids/v3/handlers/PrepareDataHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/PrepareDataHandler.java new file mode 100644 index 00000000..7a0d0e30 --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/PrepareDataHandler.java @@ -0,0 +1,105 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.BufferedOutputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Files; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; 
+import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.ValueContainer; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; + +public class PrepareDataHandler extends RequestHandlerBase { + + public PrepareDataHandler() { + super(new StorageUnit[]{StorageUnit.DATAFILE, StorageUnit.DATASET, null}, RequestType.PREPAREDATA); + } + + @Override + public ValueContainer handle(HashMap parameters) + throws BadRequestException, InternalException, InsufficientPrivilegesException, NotFoundException, + DataNotOnlineException, NotImplementedException { + + + long start = System.currentTimeMillis(); + var serviceProvider = ServiceProvider.getInstance(); + + String sessionId = parameters.get("sessionId").getString(); + String investigationIds = parameters.get("investigationIds").getString(); + String datasetIds = parameters.get("datasetIds").getString(); + String datafileIds = parameters.get("datafileIds").getString(); + boolean compress = parameters.get("compress").getBool(); + boolean zip = parameters.get("zip").getBool(); + String ip = parameters.get("ip").getString(); + + // Log and validate + logger.info("New webservice request: prepareData " + "investigationIds='" + investigationIds + "' " + + "datasetIds='" + datasetIds + "' " + "datafileIds='" + datafileIds + "' " + "compress='" + compress + + "' " + "zip='" + zip + "'"); + + validateUUID("sessionId", sessionId); + + final DataSelectionV3Base dataSelection = this.getDataSelection(sessionId, + investigationIds, datasetIds, datafileIds); + + // Do it + String preparedId = UUID.randomUUID().toString(); + + Map dsInfos = dataSelection.getDsInfo(); + Set emptyDs = dataSelection.getEmptyDatasets(); + Map dfInfos = dataSelection.getDfInfo(); + + dataSelection.restoreDataInfos(); + + if (dataSelection.mustZip()) { + zip = true; + } + + logger.debug("Writing to " + preparedDir.resolve(preparedId)); + try (OutputStream stream = 
new BufferedOutputStream(Files.newOutputStream(preparedDir.resolve(preparedId)))) { + pack(stream, zip, compress, dsInfos, dfInfos, emptyDs); + } catch (IOException e) { + throw new InternalException(e.getClass() + " " + e.getMessage()); + } + + logger.debug("preparedId is " + preparedId); + + if (serviceProvider.getLogSet().contains(CallType.PREPARE)) { + try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) { + gen.write("userName", serviceProvider.getIcat().getUserName(sessionId)); + addIds(gen, investigationIds, datasetIds, datafileIds); + gen.write("preparedId", preparedId); + gen.writeEnd(); + } + String body = baos.toString(); + serviceProvider.getTransmitter().processMessage("prepareData", ip, body, start); + } catch (IcatException_Exception e) { + logger.error("Failed to prepare jms message " + e.getClass() + " " + e.getMessage()); + } + } + + return new ValueContainer(preparedId); + } +} \ No newline at end of file From 320aa3455b3ed53b93f41fb143b5c282fa8c6b1b Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Mon, 4 Mar 2024 11:53:57 +0100 Subject: [PATCH 27/92] Fix for RequestHandlerServer and made it an EJB --- .../java/org/icatproject/ids/IdsBean.java | 2 - .../java/org/icatproject/ids/IdsService.java | 10 +-- .../ids/v3/DataSelectionV3Base.java | 2 - .../ids/v3/RequestHandlerService.java | 71 +++++++++---------- 4 files changed, 40 insertions(+), 45 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index a8dda5c0..b648737a 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -70,10 +70,8 @@ import org.icatproject.ids.plugin.AlreadyLockedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; import org.icatproject.ids.plugin.MainStorageInterface; -import org.icatproject.ids.v3.DataSelectionFactory; import 
org.icatproject.ids.v3.DataSelectionV3Base; import org.icatproject.ids.v3.RequestHandlerService; -import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.DeferredOp; diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index f5b1161d..419183ee 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -39,6 +39,7 @@ import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.v3.RequestHandlerService; +import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.models.ValueContainer; @@ -61,9 +62,10 @@ public class IdsService { @EJB private IcatReader reader; - private FiniteStateMachine fsm = null; + @EJB + private RequestHandlerService requestService; - private RequestHandlerService requestService = null; + private FiniteStateMachine fsm = null; /** * Archive data specified by the investigationIds, datasetIds and @@ -399,9 +401,7 @@ private void init() { FiniteStateMachine.createInstance(reader, lockManager, PropertyHandler.getInstance().getStorageUnit()); this.fsm = FiniteStateMachine.getInstance(); this.fsm.init(); - - this.requestService = new RequestHandlerService(); - this.requestService.init(this.transmitter, this.lockManager, this.fsm, this.reader); + ServiceProvider.createInstance(transmitter, fsm, lockManager, reader); logger.info("created IdsService"); } diff --git a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java index ad04148e..5a841493 100644 --- 
a/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java +++ b/src/main/java/org/icatproject/ids/v3/DataSelectionV3Base.java @@ -15,9 +15,7 @@ import java.util.concurrent.Future; import java.util.concurrent.locks.ReentrantLock; -import org.icatproject.ids.IdsBean.RestoreDataInfoTask; import org.icatproject.ids.Status; -import org.icatproject.ids.StorageUnit; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InternalException; diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 1c1c2a3b..d4c337d7 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -4,10 +4,7 @@ import java.nio.file.Path; import java.util.HashMap; -import org.icatproject.ids.IcatReader; -import org.icatproject.ids.LockManager; import org.icatproject.ids.PropertyHandler; -import org.icatproject.ids.Transmitter; import org.icatproject.ids.exceptions.BadRequestException; import org.icatproject.ids.exceptions.DataNotOnlineException; import org.icatproject.ids.exceptions.InsufficientPrivilegesException; @@ -15,7 +12,6 @@ import org.icatproject.ids.exceptions.NotFoundException; import org.icatproject.ids.exceptions.NotImplementedException; import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.RequestType; import org.icatproject.ids.v3.handlers.ArchiveHandler; import org.icatproject.ids.v3.handlers.GetDataFileIdsHandler; @@ -32,7 +28,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -//TODO: rename to RequestHandlerService +import jakarta.annotation.PostConstruct; +import jakarta.ejb.Stateless; + +@Stateless public class RequestHandlerService { private HashMap handlers; @@ -48,26 
+47,6 @@ public class RequestHandlerService { private UnfinishedWorkServiceBase unfinishedWorkService; - public RequestHandlerService() { - - this.propertyHandler = PropertyHandler.getInstance(); - this.unfinishedWorkService = new UnfinishedWorkServiceBase(); - - this.handlers = new HashMap(); - this.registerHandler(new GetDataHandler()); - this.registerHandler(new ArchiveHandler()); - this.registerHandler(new GetIcatUrlHandler()); - this.registerHandler(new GetDataFileIdsHandler()); - this.registerHandler(new GetServiceStatusHandler()); - this.registerHandler(new GetSizeHandler()); - this.registerHandler(new GetStatusHandler()); - this.registerHandler(new IsPreparedHandler()); - this.registerHandler(new IsReadOnlyHandler()); - this.registerHandler(new IsTwoLevelHandler()); - this.registerHandler(new PrepareDataHandler()); - } - - private void registerHandler(RequestHandlerBase requestHandler) { //use only the handlers that supports the configured StorageUnit @@ -85,12 +64,11 @@ public ValueContainer handle(RequestType requestType, HashMap(); + this.registerHandler(new GetDataHandler()); + this.registerHandler(new ArchiveHandler()); + this.registerHandler(new GetIcatUrlHandler()); + this.registerHandler(new GetDataFileIdsHandler()); + this.registerHandler(new GetServiceStatusHandler()); + this.registerHandler(new GetSizeHandler()); + this.registerHandler(new GetStatusHandler()); + this.registerHandler(new IsPreparedHandler()); + this.registerHandler(new IsReadOnlyHandler()); + this.registerHandler(new IsTwoLevelHandler()); + this.registerHandler(new PrepareDataHandler()); + logger.info("Initializing " + this.handlers.size() + " RequestHandlers..."); for(RequestHandlerBase handler : this.handlers.values()) { handler.init(); From e6b9947914935c5dfd31a3933806c8306cea5cfb Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Wed, 6 Mar 2024 09:51:19 +0100 Subject: [PATCH 28/92] Made IdsBean.key un-static because it is initialized in an un-static way. 
This caused problems. --- src/main/java/org/icatproject/ids/IdsBean.java | 7 ++++--- src/main/java/org/icatproject/ids/IdsService.java | 8 +------- .../org/icatproject/ids/v3/RequestHandlerService.java | 1 - 3 files changed, 5 insertions(+), 11 deletions(-) diff --git a/src/main/java/org/icatproject/ids/IdsBean.java b/src/main/java/org/icatproject/ids/IdsBean.java index b648737a..c362c280 100644 --- a/src/main/java/org/icatproject/ids/IdsBean.java +++ b/src/main/java/org/icatproject/ids/IdsBean.java @@ -1,6 +1,5 @@ package org.icatproject.ids; -import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; @@ -24,7 +23,6 @@ import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; -import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; @@ -72,6 +70,7 @@ import org.icatproject.ids.plugin.MainStorageInterface; import org.icatproject.ids.v3.DataSelectionV3Base; import org.icatproject.ids.v3.RequestHandlerService; +import org.icatproject.ids.v3.ServiceProvider; import org.icatproject.ids.v3.FiniteStateMachine.FiniteStateMachine; import org.icatproject.ids.v3.enums.CallType; import org.icatproject.ids.v3.enums.DeferredOp; @@ -146,7 +145,7 @@ public Void call() throws Exception { private static Boolean inited = false; - private static String key; + private String key; private final static Logger logger = LoggerFactory.getLogger(IdsBean.class); private static String paddedPrefix; @@ -208,6 +207,8 @@ public static String getLocation(long dfid, String location) if (location == null) { throw new InternalException("location is null"); } + + var key = ServiceProvider.getInstance().getPropertyHandler().getKey(); if (key == null) { return location; } else { diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index 419183ee..d75c96bc 
100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -544,7 +544,6 @@ public String prepareData(@Context HttpServletRequest request, @FormParam("sessi @FormParam("zip") boolean zip) throws BadRequestException, InsufficientPrivilegesException, NotFoundException, InternalException, NotImplementedException, DataNotOnlineException { - var parameters = new HashMap(); parameters.put("sessionId", new ValueContainer(sessionId)); parameters.put("investigationIds", new ValueContainer(investigationIds)); @@ -554,12 +553,7 @@ public String prepareData(@Context HttpServletRequest request, @FormParam("sessi parameters.put("zip", new ValueContainer(zip)); parameters.put("ip", new ValueContainer(request.getRemoteAddr())); - // TODO: weird - calling idsBean.prepareData() works and tests succeeding. It just calls requestService.handle(). - // But calling requestService.handle() directly here, some tests will fail with {"code":"InternalException","message":"Restore failed"} - // Maybe because it is a Post Request @POST - - //return this.requestService.handle(RequestType.PREPAREDATA, parameters).getString(); - return idsBean.prepareData(parameters, this.requestService); + return this.requestService.handle(RequestType.PREPAREDATA, parameters).getString(); } /** diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index d4c337d7..1f93b095 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -1,6 +1,5 @@ package org.icatproject.ids.v3; -import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; From 58f8f5c15b8b8a0832584874539fa3416e77d53f Mon Sep 17 00:00:00 2001 From: Marcus Lewerenz Date: Wed, 6 Mar 2024 11:54:48 +0100 Subject: [PATCH 29/92] Added PutHandler - may needs a further redesign --- 
.../java/org/icatproject/ids/IdsService.java | 18 +- .../ids/v3/RequestHandlerService.java | 2 + .../icatproject/ids/v3/enums/RequestType.java | 13 +- .../ids/v3/enums/ValueContainerType.java | 2 +- .../ids/v3/handlers/PutHandler.java | 359 ++++++++++++++++++ .../ids/v3/models/ValueContainer.java | 23 +- 6 files changed, 412 insertions(+), 5 deletions(-) create mode 100644 src/main/java/org/icatproject/ids/v3/handlers/PutHandler.java diff --git a/src/main/java/org/icatproject/ids/IdsService.java b/src/main/java/org/icatproject/ids/IdsService.java index d75c96bc..e050bc15 100644 --- a/src/main/java/org/icatproject/ids/IdsService.java +++ b/src/main/java/org/icatproject/ids/IdsService.java @@ -591,8 +591,22 @@ public Response put(@Context HttpServletRequest request, InputStream body, @QueryParam("datafileCreateTime") String datafileCreateTime, @QueryParam("datafileModTime") String datafileModTime) throws BadRequestException, NotFoundException, InternalException, InsufficientPrivilegesException, NotImplementedException, DataNotOnlineException { - return idsBean.put(body, sessionId, name, datafileFormatId, datasetId, description, doi, datafileCreateTime, - datafileModTime, false, false, request.getRemoteAddr()); + + var parameters = new HashMap(); + parameters.put("body", new ValueContainer(body)); + parameters.put("sessionId", new ValueContainer(sessionId)); + parameters.put("name", new ValueContainer(name)); + parameters.put("datafileFormatId", new ValueContainer(datafileFormatId)); + parameters.put("datasetId", new ValueContainer(datasetId)); + parameters.put("description", new ValueContainer(description)); + parameters.put("doi", new ValueContainer(doi)); + parameters.put("datafileCreateTime", new ValueContainer(datafileCreateTime)); + parameters.put("datafileModTime", new ValueContainer(datafileModTime)); + parameters.put("wrap", new ValueContainer(false)); + parameters.put("padding", new ValueContainer(false)); + parameters.put("ip", new 
ValueContainer(request.getRemoteAddr())); + + return this.requestService.handle(RequestType.PUT, parameters).getResponse(); } /** diff --git a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java index 1f93b095..e4375864 100644 --- a/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java +++ b/src/main/java/org/icatproject/ids/v3/RequestHandlerService.java @@ -23,6 +23,7 @@ import org.icatproject.ids.v3.handlers.IsReadOnlyHandler; import org.icatproject.ids.v3.handlers.IsTwoLevelHandler; import org.icatproject.ids.v3.handlers.PrepareDataHandler; +import org.icatproject.ids.v3.handlers.PutHandler; import org.icatproject.ids.v3.models.ValueContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -132,6 +133,7 @@ private void init() { this.registerHandler(new IsReadOnlyHandler()); this.registerHandler(new IsTwoLevelHandler()); this.registerHandler(new PrepareDataHandler()); + this.registerHandler(new PutHandler()); logger.info("Initializing " + this.handlers.size() + " RequestHandlers..."); for(RequestHandlerBase handler : this.handlers.values()) { diff --git a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java index 08297397..cdfe3c4c 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/RequestType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/RequestType.java @@ -4,5 +4,16 @@ * This enum contains all defined types of requests to this server */ public enum RequestType { - GETDATA, ARCHIVE, GETICATURL, GETDATAFILEIDS, GETSERVICESTATUS, GETSIZE, GETSTATUS, ISPREPARED, ISREADONLY, ISTWOLEVEL, PREPAREDATA + GETDATA, + ARCHIVE, + GETICATURL, + GETDATAFILEIDS, + GETSERVICESTATUS, + GETSIZE, + GETSTATUS, + ISPREPARED, + ISREADONLY, + ISTWOLEVEL, + PREPAREDATA, + PUT } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java 
b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java index ad018925..b6fa283c 100644 --- a/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java +++ b/src/main/java/org/icatproject/ids/v3/enums/ValueContainerType.java @@ -4,5 +4,5 @@ * This enum provides all possible values of a ValueContainer */ public enum ValueContainerType { - INVALID, VOID, INT, LONG, BOOL, STRING, REQUEST, RESPONSE + INVALID, VOID, INT, LONG, BOOL, STRING, REQUEST, RESPONSE, INPUTSTREAM } \ No newline at end of file diff --git a/src/main/java/org/icatproject/ids/v3/handlers/PutHandler.java b/src/main/java/org/icatproject/ids/v3/handlers/PutHandler.java new file mode 100644 index 00000000..d89241ea --- /dev/null +++ b/src/main/java/org/icatproject/ids/v3/handlers/PutHandler.java @@ -0,0 +1,359 @@ +package org.icatproject.ids.v3.handlers; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.security.NoSuchAlgorithmException; +import java.util.Collections; +import java.util.GregorianCalendar; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.TreeMap; +import java.util.zip.CRC32; +import javax.xml.datatype.DatatypeFactory; + +import org.icatproject.Datafile; +import org.icatproject.DatafileFormat; +import org.icatproject.Dataset; +import org.icatproject.ICAT; +import org.icatproject.IcatExceptionType; +import org.icatproject.IcatException_Exception; +import org.icatproject.ids.CheckedWithSizeInputStream; +import org.icatproject.ids.DataSelection; +import org.icatproject.ids.IdsBean; +import org.icatproject.ids.LockManager.Lock; +import org.icatproject.ids.LockManager.LockType; +import org.icatproject.ids.StorageUnit; +import org.icatproject.ids.exceptions.BadRequestException; +import org.icatproject.ids.exceptions.DataNotOnlineException; +import org.icatproject.ids.exceptions.IdsException; +import 
org.icatproject.ids.exceptions.InsufficientPrivilegesException; +import org.icatproject.ids.exceptions.InternalException; +import org.icatproject.ids.exceptions.NotFoundException; +import org.icatproject.ids.exceptions.NotImplementedException; +import org.icatproject.ids.plugin.AlreadyLockedException; +import org.icatproject.ids.v3.DataSelectionV3Base; +import org.icatproject.ids.v3.RequestHandlerBase; +import org.icatproject.ids.v3.ServiceProvider; +import org.icatproject.ids.v3.enums.CallType; +import org.icatproject.ids.v3.enums.DeferredOp; +import org.icatproject.ids.v3.enums.RequestType; +import org.icatproject.ids.v3.models.DataFileInfo; +import org.icatproject.ids.v3.models.DataInfoBase; +import org.icatproject.ids.v3.models.DataSetInfo; +import org.icatproject.ids.v3.models.ValueContainer; +import org.icatproject.utils.IcatSecurity; + +import jakarta.json.Json; +import jakarta.json.stream.JsonGenerator; +import jakarta.ws.rs.core.Response; + +public class PutHandler extends RequestHandlerBase { + + private DatatypeFactory datatypeFactory; + private static String paddedPrefix; + private static final String prefix = ""; + + static { + paddedPrefix = ""; - - /** - * matches standard UUID format of 8-4-4-4-12 hexadecimal digits - */ - public static final Pattern uuidRegExp = Pattern - .compile("^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"); - - static { - paddedPrefix = "