Skip to content

Commit

Permalink
fixup: feat: add multiple selection to CSV writer
Browse files Browse the repository at this point in the history
The same approach as for XSL has been implemented for the CSV writer. The feature-type selection wizard page now provides a multi-selection table, and one .csv file is exported for each selected type.

ING-3987
  • Loading branch information
emanuelaepure10 committed Oct 4, 2023
1 parent 2487be3 commit 5fdc62e
Show file tree
Hide file tree
Showing 5 changed files with 388 additions and 99 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
/*
* Copyright (c) 2023 wetransform GmbH
*
* All rights reserved. This program and the accompanying materials are made
* available under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution. If not, see <http://www.gnu.org/licenses/>.
*
* Contributors:
* wetransform GmbH <http://www.wetransform.to>
*/

package eu.esdihumboldt.hale.common.core.io.supplier;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

/**
* I/O supplier based on a {@link ByteArrayOutputStream}
*
* @author EmanuelaEpure
*/
public class ByteArrayOutputStreamSupplier
		implements LocatableInputSupplier<InputStream>, LocatableOutputSupplier<OutputStream> {

	// in-memory buffer backing both input and output sides of this supplier
	private final ByteArrayOutputStream buffer;

	/**
	 * Create an I/O supplier backed by the given in-memory stream.
	 *
	 * @param byteArrayOutputStream the stream that receives written output and
	 *            provides the bytes for subsequent input
	 */
	public ByteArrayOutputStreamSupplier(ByteArrayOutputStream byteArrayOutputStream) {
		this.buffer = byteArrayOutputStream;
	}

	/**
	 * Returns a stream reading a snapshot of the bytes written so far.
	 *
	 * @see eu.esdihumboldt.util.io.InputSupplier#getInput()
	 */
	@Override
	public InputStream getInput() throws IOException {
		// toByteArray() copies the current contents, so the returned stream
		// is independent of later writes to the buffer
		return new ByteArrayInputStream(buffer.toByteArray());
	}

	/**
	 * In-memory data has no location.
	 *
	 * @see eu.esdihumboldt.hale.common.core.io.supplier.Locatable#getLocation()
	 */
	@Override
	public URI getLocation() {
		return null;
	}

	/**
	 * Returns the wrapped stream itself; repeated calls yield the same
	 * instance, so successive writes append to the same buffer.
	 *
	 * @see eu.esdihumboldt.util.io.OutputSupplier#getOutput()
	 */
	@Override
	public OutputStream getOutput() throws IOException {
		return buffer;
	}

	/**
	 * In-memory data has no location.
	 *
	 * @see eu.esdihumboldt.hale.common.core.io.supplier.LocatableInputSupplier#getUsedLocation()
	 */
	@Override
	public URI getUsedLocation() {
		return null;
	}

}
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,18 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.List;

import javax.xml.namespace.QName;

import org.eclipse.core.runtime.content.IContentType;
import org.junit.BeforeClass;
import org.junit.Rule;
Expand All @@ -35,10 +42,12 @@
import eu.esdihumboldt.hale.common.core.HalePlatform;
import eu.esdihumboldt.hale.common.core.io.Value;
import eu.esdihumboldt.hale.common.core.io.report.IOReport;
import eu.esdihumboldt.hale.common.core.io.supplier.ByteArrayOutputStreamSupplier;
import eu.esdihumboldt.hale.common.core.io.supplier.FileIOSupplier;
import eu.esdihumboldt.hale.common.instance.io.InstanceWriter;
import eu.esdihumboldt.hale.common.instance.model.InstanceCollection;
import eu.esdihumboldt.hale.common.schema.model.Schema;
import eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchemaSpace;
import eu.esdihumboldt.hale.common.test.TestUtil;
import eu.esdihumboldt.hale.io.csv.InstanceTableIOConstants;
import eu.esdihumboldt.hale.io.csv.reader.internal.CSVSchemaReader;
Expand Down Expand Up @@ -73,9 +82,58 @@ public static void waitForServices() {
*/
@Test
public void testWriteSimpleSchema() throws Exception {
	// fixed export configuration for this case
	char separator = ',';
	QName exportType = new QName("http://www.opengis.net/om/2.0", "OM_ObservationType");

	Schema exampleSchema = CSVInstanceWriterTestUtil.createExampleSchema();
	TransformationExample assignExample = TransformationExamples
			.getExample(TransformationExamples.SIMPLE_ASSIGN);

	File targetFile = tmpFolder.newFile("csvTestWriteSimpleSchema.csv");

	// export to a temporary file (no quote/escape overrides, no in-memory sink)
	boolean success = writeCsvToFile(targetFile, true, false, Value.of(separator), null, null,
			assignExample.getSourceInstances(), exampleSchema, exportType, null);
	assertTrue("Csv Export was not successful.", success);

	// read the file back and run the shared content checks
	testWriteSimpleSchemaFromReader(new CSVReader(new FileReader(targetFile), separator));
}

/**
 * Test - write simple data, without nested properties and useSchema=false,
 * exporting to an in-memory stream instead of a file
 *
 * @throws Exception , if an error occurs
 */
@Test
public void testWriteSimpleSchemaWithByteArrayOutputStream() throws Exception {
	// fixed export configuration for this case
	char separator = ',';
	QName exportType = new QName("http://www.opengis.net/om/2.0", "OM_ObservationType");

	Schema exampleSchema = CSVInstanceWriterTestUtil.createExampleSchema();
	TransformationExample assignExample = TransformationExamples
			.getExample(TransformationExamples.SIMPLE_ASSIGN);

	// the export target is an in-memory buffer instead of a temporary file
	ByteArrayOutputStream sink = new ByteArrayOutputStream();

	boolean success = writeCsvToFile(null, true, false, Value.of(separator), null, null,
			assignExample.getSourceInstances(), exampleSchema, exportType, sink);
	assertTrue("Csv Export was not successful.", success);

	// read the exported bytes back and run the shared content checks
	CSVReader reader = new CSVReader(
			new BufferedReader(
					new InputStreamReader(new ByteArrayInputStream(sink.toByteArray()))),
			separator);
	testWriteSimpleSchemaFromReader(reader);
}

private void testWriteSimpleSchemaFromReader(CSVReader reader) throws IOException {
// alternative the data could be generated by iterating through the
// exemple project's source data:
// eu.esdihumboldt.cst.test/src/testdata/simpleassign/instance1.xml
Expand All @@ -84,14 +142,7 @@ public void testWriteSimpleSchema() throws Exception {
// header size
int numberOfColumns = 4; // in the example t1.xsd: elements + attribute
int numberOfRows = 3;
char sep = ',';

File tmpFile = tmpFolder.newFile("csvTestWriteSimpleSchema.csv");

assertTrue("Csv Export was not successful.", writeCsvToFile(tmpFile, true, false,
Value.of(sep), null, null, example.getSourceInstances()));

CSVReader reader = new CSVReader(new FileReader(tmpFile), sep);
List<String[]> rows = reader.readAll();

reader.close();
Expand Down Expand Up @@ -146,10 +197,11 @@ public void testWriteSimpleSchemaColOrder() throws Exception {
int numberOfRows = 3; // counting also the header
char sep = ',';

QName qname = new QName("http://www.opengis.net/om/2.0", "OM_ObservationType");
File tmpFile = tmpFolder.newFile("csvTestWriteSimpleSchema.csv");

assertTrue("Csv Export was not successful.",
writeCsvToFile(tmpFile, true, true, Value.of(sep), null, null, instance));
assertTrue("Csv Export was not successful.", writeCsvToFile(tmpFile, true, true,
Value.of(sep), null, null, instance, schema, qname, null));

CSVReader reader = new CSVReader(new FileReader(tmpFile), sep);
List<String[]> rows = reader.readAll();
Expand Down Expand Up @@ -195,6 +247,7 @@ public void testWriteSimpleSchemaColOrder() throws Exception {
@Test
public void testWriteSimpleSchemaDelimiter() throws Exception {

Schema schema = CSVInstanceWriterTestUtil.createExampleSchema();
TransformationExample example = TransformationExamples
.getExample(TransformationExamples.SIMPLE_ASSIGN);

Expand All @@ -209,10 +262,12 @@ public void testWriteSimpleSchemaDelimiter() throws Exception {
char quo = '\'';
char esc = '"';

QName qname = new QName("http://www.opengis.net/om/2.0", "OM_ObservationType");
File tmpFile = tmpFolder.newFile("csvTestWriteSimpleSchemaDelimiter.csv");

assertTrue("Csv Export was not successful.", writeCsvToFile(tmpFile, true, false,
Value.of(sep), Value.of(quo), Value.of(esc), example.getSourceInstances()));
assertTrue("Csv Export was not successful.",
writeCsvToFile(tmpFile, true, false, Value.of(sep), Value.of(quo), Value.of(esc),
example.getSourceInstances(), schema, qname, null));

CSVReader reader = new CSVReader(new FileReader(tmpFile), sep, quo, esc);
List<String[]> rows = reader.readAll();
Expand Down Expand Up @@ -248,7 +303,7 @@ public void testWriteSimpleSchemaDelimiter() throws Exception {
*/
@Test
public void testWriteComplexSchema() throws Exception {

Schema schema = CSVInstanceWriterTestUtil.createExampleSchema();
TransformationExample example = TransformationExamples
.getExample(TransformationExamples.SIMPLE_COMPLEX);
// alternative the data could be generated by iterating through the
Expand All @@ -262,9 +317,10 @@ public void testWriteComplexSchema() throws Exception {
char sep = ',';

File tmpFile = tmpFolder.newFile("csvTestWriteComplexSchema.csv");
QName qname = new QName("http://www.opengis.net/om/2.0", "OM_ObservationType");

assertTrue("Csv Export was not successful.", writeCsvToFile(tmpFile, true, false,
Value.of(sep), null, null, example.getSourceInstances()));
Value.of(sep), null, null, example.getSourceInstances(), schema, qname, null));

CSVReader reader = new CSVReader(new FileReader(tmpFile), sep);
List<String[]> rows = reader.readAll();
Expand Down Expand Up @@ -298,7 +354,8 @@ public void testWriteComplexSchema() throws Exception {
}

private boolean writeCsvToFile(File tmpFile, boolean solveNestedProperties, boolean useSchema,
Value sep, Value quo, Value esc, InstanceCollection instances) throws Exception {
Value sep, Value quo, Value esc, InstanceCollection instances, Schema schema,
QName qname, ByteArrayOutputStream byteArrayOutputStream) throws Exception {
// set instances to xls instance writer
InstanceWriter writer = new CSVInstanceWriter();
IContentType contentType = HalePlatform.getContentTypeManager()
Expand All @@ -309,11 +366,23 @@ private boolean writeCsvToFile(File tmpFile, boolean solveNestedProperties, bool
writer.setParameter(CSVSchemaReader.PARAM_SEPARATOR, sep);
writer.setParameter(CSVSchemaReader.PARAM_QUOTE, quo);
writer.setParameter(CSVSchemaReader.PARAM_ESCAPE, esc);
writer.setParameter(InstanceTableIOConstants.EXPORT_TYPE, Value.of(qname.getLocalPart()));
writer.setInstances(instances);

DefaultSchemaSpace ss = new DefaultSchemaSpace();
ss.addSchema(schema);
writer.setTargetSchema(ss);

// write instances to a temporary CSV file
writer.setTarget(new FileIOSupplier(tmpFile));
if (tmpFile != null) {
writer.setTarget(new FileIOSupplier(tmpFile));
}
else {
writer.setTarget(new ByteArrayOutputStreamSupplier(byteArrayOutputStream));
}
writer.setContentType(contentType);
IOReport report = writer.execute(null);
return report.isSuccess();
}

}
Loading

0 comments on commit 5fdc62e

Please sign in to comment.