diff --git a/README.md b/README.md
index 8315ba5a37c5..3b36bb998934 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,8 @@ help getting started using Gradoop.
##### Further Information (articles and talks)
-* [Graph Sampling with Distributed In-Memory Dataflow Systems, arXiv, October 2019](https://arxiv.org/pdf/1910.04493.pdf)
+* [Exploration and Analysis of Temporal Property Graphs, EDBT Demo, March 2021](https://dbs.uni-leipzig.de/file/EDBT_DEMO_Rost_2021_published.pdf)
+* [Graph Sampling with Distributed In-Memory Dataflow Systems, BTW, March 2021](https://dbs.uni-leipzig.de/file/A3-21.pdf)
* [Evolution Analysis of Large Graphs with Gradoop, ECML PKDD LEG Workshop, September 2019](https://dbs.uni-leipzig.de/file/LEGECML-PKDD_2019_paper_9.pdf)
* [Gradoop @Gridka Keynote Distributed Graph Analytics, August 2019](https://indico.scc.kit.edu/event/460/contributions/5772/attachments/2873/4171/gradoop_gridka19.pdf)
* [Temporal Graph Analysis using Gradoop, BTW 2019-Workshopband, March 2019](https://dl.gi.de/bitstream/handle/20.500.12116/21797/C2-1.pdf)
@@ -96,18 +97,18 @@ Latest weekly build (additional repository is required):
```
-In any case you also need Apache Flink (version 1.7.2):
+In any case you also need Apache Flink (version 1.9.3):
```xml
org.apache.flink
flink-java
- 1.7.2
+ 1.9.3
org.apache.flink
flink-clients_2.11
- 1.7.2
+ 1.9.3
```
@@ -182,6 +183,10 @@ Used to maintain the code style for the whole project.
## Related Repositories
+### [Gradoop Tutorial](https://github.com/dbs-leipzig/gradoop-tutorial)
+
+Gradoop Tutorial which was presented at the [BOSS'20](https://boss-workshop.github.io/boss-2020/) workshop of the VLDB 2020 international conference.
+
### [Gradoop Benchmarks](https://github.com/dbs-leipzig/gradoop-benchmarks)
This repository contains sets of Gradoop operator benchmarks designed to run on a cluster to measure
@@ -191,6 +196,15 @@ scalability and speedup of the operators.
Demo application to show the functionalities of the grouping and query operator in an interactive web UI.
+
+### [Temporal Graph Explorer](https://github.com/dbs-leipzig/temporal_graph_explorer)
+
+Gradoop Temporal Graph Explorer Demo which showcases some operators of the Temporal Property Graph Model.
+
+### [Gradoop GDL](https://github.com/dbs-leipzig/gdl)
+
+This repository contains the definition of our Temporal Graph Definition Language (Temporal-GDL).
+
### Version History
See the [Changelog](https://github.com/dbs-leipzig/gradoop/wiki/Changelog) at the Wiki pages.
diff --git a/gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/matching/single/cypher/operators/OperatorIntegrationTest.java b/gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/matching/single/cypher/operators/OperatorIntegrationTest.java
deleted file mode 100644
index c9a48a403258..000000000000
--- a/gradoop-flink/src/test/java/org/gradoop/flink/model/impl/operators/matching/single/cypher/operators/OperatorIntegrationTest.java
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Copyright © 2014 - 2021 Leipzig University (Database Research Group)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-//
-//package org.gradoop.flink.model.impl.operators.matching.single.cypher.operators.physical;
-//
-//import com.google.common.collect.Lists;
-//import com.google.common.collect.Sets;
-//import org.apache.flink.api.common.operators.base.JoinOperatorBase;
-//import org.apache.flink.api.java.DataSet;
-//import org.gradoop.flink.model.GradoopFlinkTestBase;
-//import org.gradoop.flink.model.impl.LogicalGraph;
-//import org.gradoop.flink.model.impl.operators.matching.common.MatchStrategy;
-//import org.gradoop.flink.model.impl.operators.matching.common.query.QueryHandler;
-//import org.gradoop.flink.model.impl.operators.matching.common.query.predicates.CNF;
-//import Embedding;
-//import org.gradoop.flink.model.impl.operators.matching.single.cypher.operators.*;
-//import org.gradoop.flink.model.impl.operators.matching.single.cypher.operators.cartesian
-// .CartesianProduct;
-//import org.gradoop.flink.model.impl.operators.matching.single.cypher.operators.expand.Expand;
-//import org.gradoop.flink.util.FlinkAsciiGraphLoader;
-//import org.junit.Before;
-//import org.junit.Ignore;
-//import org.junit.Test;
-//
-//import java.util.ArrayList;
-//import java.util.HashMap;
-//
-//import static org.gradoop.flink.model.impl.operators.matching.single.cypher.utils.ExpandDirection.*;
-//
-//@Ignore
-//public class OperatorIntegrationTest extends GradoopFlinkTestBase {
-//
-// private FlinkAsciiGraphLoader testGraphs;
-//
-// @Before
-// public void loadGraph() throws Exception {
-// this.testGraphs = getLoaderFromFile(
-// OperatorIntegrationTest.class.getResource("/data/gdl/physicalOperators.gdl").getFile()
-// );
-// }
-//
-// /**
-// * MATCH (p1:Person {name: "Alice"})-[r1:worked_at]->(o)
-// * WHERE r1.active=1
-// * RETURN *
-// */
-// @Test
-// public void simplePredicateQueryTest() throws Exception {
-// QueryHandler query = new QueryHandler(
-// "MATCH (p1:Person {name: \"Alice\"})-[r1:worked_at]->(o) " +
-// "WHERE r1.active=1"
-// );
-//
-// LogicalGraph graph = testGraphs.getLogicalGraphByVariable("g");
-//
-// CNF predicates = query.getPredicates();
-// CNF p1Predicate = predicates.getSubCNF(Sets.newHashSet("p1"));
-// CNF r1Predicate = predicates.getSubCNF(Sets.newHashSet("r1"));
-//
-// DataSet p1 =
-// new FilterVertices(graph.getVertices(), p1Predicate).evaluate();
-// DataSet r1 =
-// new FilterEdges(graph.getEdges(), r1Predicate).evaluate();
-//
-// //DataSet res = new JoinEmbeddings(p1,r1,0, ExpandDirection.OUT).evaluate();
-//
-// //System.out.println("res.collect() = " + res.collect());
-// }
-//
-// /**
-// * MATCH (a:Person)-[]->(b:Person)
-// * WHERE a.age > b.age OR a.name=b.name
-// * RETURN *
-// */
-// @Test
-// public void CrossPredicateTest() throws Exception {
-// LogicalGraph graph = loadGraph("","g");
-//
-// QueryHandler query = new QueryHandler(
-// "MATCH (a:Person)-[]->(b:Person)" +
-// "WHERE a.age > b.age OR a.name=b.name"
-// );
-//
-// CNF predicates = query.getPredicates();
-//
-// DataSet vertices =
-// new FilterAndProjectVertices(
-// graph.getVertices(),
-// predicates.getSubCNF(Sets.newHashSet("a")),
-// Lists.newArrayList("age","name")
-// ).evaluate();
-//
-// DataSet edges = new ProjectEdges(graph.getEdges()).evaluate();
-//
-// DataSet aExpanded = new ExpandOne(vertices,edges,0, OUT).evaluate();
-//
-// DataSet ab = new JoinEmbeddings(aExpanded,vertices, 2, 0).evaluate();
-//
-// HashMap mapping = new HashMap<>();
-// mapping.put("a",0);
-// mapping.put("b",2);
-// DataSet res = new FilterEmbeddings(
-// ab,
-// predicates.getSubCNF(Sets.newHashSet("a","b")),
-// mapping
-// ).evaluate();
-//
-// System.out.println("res.collect() = " + res.collect());
-// }
-//
-// /**
-// * MATCH (n)-[]->(m)-[]->(o)
-// * RETURN *
-// */
-// @Test
-// public void homomorphismTest() throws Exception {
-// LogicalGraph graph = loadGraph("","g");
-//
-// DataSet vertices =
-// new ProjectVertices(graph.getVertices()).evaluate();
-// DataSet edges =
-// new ProjectEdges(graph.getEdges()).evaluate();
-//
-// DataSet a = new ExpandOne(
-// vertices, edges, 0,
-// OUT, MatchStrategy.HOMOMORPHISM, JoinOperatorBase.JoinHint.OPTIMIZER_CHOOSES
-// ).evaluate();
-//
-// DataSet res = new ExpandOne(a, edges, 2,
-// OUT, MatchStrategy.HOMOMORPHISM, JoinOperatorBase.JoinHint.OPTIMIZER_CHOOSES
-// ).evaluate();
-//
-// System.out.println("res.collect() = " + res.collect());
-// }
-//
-// /**
-// * MATCH (n)-[]->(m)-[]->(o)
-// * RETURN *
-// */
-// @Test
-// public void isomorphismTest() throws Exception {
-// LogicalGraph graph = loadGraph("","g");
-//
-// DataSet vertices =
-// new ProjectVertices(graph.getVertices()).evaluate();
-// DataSet edges =
-// new ProjectEdges(graph.getEdges()).evaluate();
-//
-// DataSet a = new ExpandOne(
-// vertices, edges, 0,
-// OUT, MatchStrategy.ISOMORPHISM, JoinOperatorBase.JoinHint.OPTIMIZER_CHOOSES
-// ).evaluate();
-//
-// DataSet res = new ExpandOne(a, edges, 2,
-// OUT, MatchStrategy.ISOMORPHISM, JoinOperatorBase.JoinHint.OPTIMIZER_CHOOSES
-// ).evaluate();
-//
-// System.out.println("res.collect() = " + res.collect());
-// }
-//
-// /**
-// * MATCH (n)-[*2..3]->(m)
-// * RETURN *
-// */
-// @Test
-// public void variableLengthPathQueryTest() throws Exception{
-// LogicalGraph graph = loadGraph("","g");
-//
-// DataSet n = new ProjectVertices(graph.getVertices()).evaluate();
-// DataSet edges = new ProjectEdges(graph.getEdges()).evaluate();
-//
-// DataSet res =
-// new Expand(n,edges,0,2,3,OUT, new ArrayList<>(), new ArrayList<>(),-1).evaluate();
-//
-// System.out.println("res.collect() = " + res.collect());
-// }
-//
-// /**
-// * MATCH (a:Department), (b:City)
-// * RETURN *
-// */
-// @Test
-// public void cartesianProductTest() throws Exception {
-// LogicalGraph graph = loadGraph("","g");
-// QueryHandler query = new QueryHandler("MATCH (a:Department), (b:City)");
-// CNF predicates = query.getPredicates();
-//
-// DataSet a = new FilterVertices(
-// graph.getVertices(),
-// predicates.getSubCNF(Sets.newHashSet("a"))
-// ).evaluate();
-// DataSet b = new FilterVertices(
-// graph.getVertices(),
-// predicates.getSubCNF(Sets.newHashSet("a"))
-// ).evaluate();
-//
-// DataSet res = new CartesianProduct(a,b).evaluate();
-//
-// System.out.println("res.collect() = " + res.collect());
-// }
-//
-// /**
-// * MATCH (a:Department), (b)-[]->(c:Person {name: "Alice")
-// * WHERE a.prop = b.prop
-// * RETURN *
-// */
-// @Test
-// public void valueJoinTest() throws Exception {
-// LogicalGraph graph = loadGraph("","g");
-// QueryHandler query = new QueryHandler(
-// " MATCH (a:Department),(b)-[]->(c:Person {name: \"Alice\")" +
-// " WHERE a.prop = b.prop"
-// );
-// CNF predicates = query.getPredicates();
-//
-// DataSet a =
-// new FilterAndProjectVertices(
-// graph.getVertices(),
-// predicates.getSubCNF(Sets.newHashSet("a")),
-// Lists.newArrayList("prop")
-// ).evaluate();
-//
-// DataSet c = new FilterVertices(
-// graph.getVertices(),
-// predicates.getSubCNF(Sets.newHashSet("c"))
-// ).evaluate();
-//
-// DataSet edges = new ProjectEdges(graph.getEdges()).evaluate();
-//
-// DataSet cexpand = new ExpandOne(c,edges,0,IN).evaluate();
-//
-// DataSet b =
-// new ProjectVertices(graph.getVertices(), Lists.newArrayList("prop")).evaluate();
-//
-// DataSet bc = new JoinEmbeddings(cexpand,b,2,0).evaluate();
-//
-// HashMap mapping = new HashMap<>();
-// mapping.put("a",0);
-// mapping.put("b",3);
-//
-// DataSet res = new ValueJoin(
-// a,
-// bc,
-// predicates.getSubCNF(Sets.newHashSet("a","b")),
-// mapping
-// ).evaluate();
-//
-// System.out.println("res.collect() = " + res.collect());
-// }
-//
-// private LogicalGraph loadGraph(String dataGraph, String variable) {
-// FlinkAsciiGraphLoader loader = getLoaderFromString(dataGraph);
-// return loader.getLogicalGraphByVariable(variable);
-// }
-//}