diff --git a/src/main/java/fr/insee/trevas/jupyter/DatasetUtils.java b/src/main/java/fr/insee/trevas/jupyter/DatasetUtils.java
index 0607fdf..3c6073e 100644
--- a/src/main/java/fr/insee/trevas/jupyter/DatasetUtils.java
+++ b/src/main/java/fr/insee/trevas/jupyter/DatasetUtils.java
@@ -10,75 +10,75 @@ public class DatasetUtils {

    public static String datasetToDisplay(Dataset dataset) {
        Map<String, Dataset.Role> roles = dataset.getDataStructure().getRoles();
        // Unwrap a persistent assignment if needed; show() may also hand us a
        // plain SparkDataset, which must not be cast to PersistentDataset.
        fr.insee.vtl.model.Dataset delegate =
                dataset instanceof PersistentDataset
                        ? ((PersistentDataset) dataset).getDelegate()
                        : dataset;
        // Only the first 50 rows are rendered.
        Dataset reducedDataset =
                new SparkDataset(((SparkDataset) delegate).getSparkDataset().limit(50), roles);
        var b = new StringBuilder();
        b.append("<table>");
        b.append("<thead>");
        b.append("<tr>");
        reducedDataset.getDataStructure()
                .forEach((name, component) -> b.append("<th>").append(name).append("</th>"));
        b.append("</tr>");
        b.append("</thead>");
        b.append("<tbody>");
        reducedDataset.getDataPoints()
                .forEach(
                        row -> {
                            b.append("<tr>");
                            reducedDataset.getDataStructure()
                                    .keySet()
                                    .forEach(
                                            name ->
                                                    b.append("<td>")
                                                            .append(row.get(name))
                                                            .append("</td>"));
                            b.append("</tr>");
                        });
        b.append("</tbody>");
        b.append("</table>");
        // Assumed reconstruction: a <style> block followed the table; its
        // contents were lost in extraction.
        b.append("<style>");
        b.append("\n" + "  \n" + "\n");
        b.append("</style>");

        return b.toString();
    }

    public static String datasetMetadataToDisplay(Dataset dataset) {
        StringBuilder sb = new StringBuilder();
        sb.append("<pre>");
        Structured.DataStructure dataStructure = dataset.getDataStructure();
        dataStructure.forEach(
                (key, value) -> {
                    sb.append("  • ")
                            .append(key)
                            .append(" (")
                            .append(value.getRole().name())
                            .append(" - ")
                            .append(value.getType().getSimpleName());
                    String valuedomain = value.getValuedomain();
                    if (null != valuedomain) {
                        sb.append(" - ").append(valuedomain);
                    }
                    sb.append(")").append("<br>").append("\n");
                });
        sb.append("</pre>");
        return sb.toString();
    }
}
diff --git a/src/main/java/fr/insee/trevas/jupyter/VtlKernel.java b/src/main/java/fr/insee/trevas/jupyter/VtlKernel.java
index e87275e..4626869 100644
--- a/src/main/java/fr/insee/trevas/jupyter/VtlKernel.java
+++ b/src/main/java/fr/insee/trevas/jupyter/VtlKernel.java
@@ -34,283 +34,283 @@ public class VtlKernel extends BaseKernel {

    private static DisplayData displayData = new DisplayData();
    private static SparkSession spark;
    private static VtlScriptEngine engine;
    private final LanguageInfo info;
    private final AutoCompleter autoCompleter;

    public VtlKernel() throws Exception {
        spark = SparkUtils.buildSparkSession();
        engine = SparkUtils.buildSparkEngine(spark);
        System.out.println("Loaded VTL engine " + engine.getFactory().getEngineName());
        ScriptEngineFactory factory = engine.getFactory();
        this.info =
                new LanguageInfo.Builder(factory.getEngineName())
                        .version(factory.getEngineVersion())
                        .build();
        registerGlobalMethods();
        this.autoCompleter = new OranoranCompleter();
    }

    private static Map<String, Dataset.Role> getRoleMap(
            Collection<Structured.Component> components) {
        return components.stream()
                .collect(
                        Collectors.toMap(
                                Structured.Component::getName, Structured.Component::getRole));
    }

    private static Map<String, Dataset.Role> getRoleMap(fr.insee.vtl.model.Dataset dataset) {
        return getRoleMap(dataset.getDataStructure().values());
    }

    private static SparkDataset asSparkDataset(Dataset dataset) {
        if (dataset instanceof SparkDataset) {
            return (SparkDataset) dataset;
        }
        if (dataset instanceof PersistentDataset) {
            fr.insee.vtl.model.Dataset ds = ((PersistentDataset) dataset).getDelegate();
            if (ds instanceof SparkDataset) {
                return (SparkDataset) ds;
            } else {
                return new SparkDataset(ds, getRoleMap(dataset), spark);
            }
        }
        throw new IllegalArgumentException("Unknown dataset type");
    }

    public static SparkDataset loadParquet(String path) throws Exception {
        return SparkUtils.readParquetDataset(spark, path);
    }

    public static SparkDataset loadCSV(String path) throws Exception {
        return SparkUtils.readCSVDataset(spark, path);
    }

    public static SparkDataset loadSas(String path) throws Exception {
        return SparkUtils.readSasDataset(spark, path);
    }

    public static String writeParquet(String path, Dataset ds) {
        SparkUtils.writeParquetDataset(path, asSparkDataset(ds));
        return "Dataset written: '" + path + "'";
    }

    public static String writeCSV(String path, Dataset ds) {
        SparkUtils.writeCSVDataset(path, asSparkDataset(ds));
        return "Dataset written: '" + path + "'";
    }

    public static String getSize(Dataset ds) {
        SparkDataset sparkDataset = asSparkDataset(ds);
        return "Dataset size: " + sparkDataset.getDataPoints().size();
    }
    public static Object show(Object o) {
        if (o instanceof Dataset) {
            SparkDataset dataset = asSparkDataset((Dataset) o);
            var roles =
                    dataset.getDataStructure().entrySet().stream()
                            .collect(
                                    Collectors.toMap(
                                            Map.Entry::getKey, e -> e.getValue().getRole()));
            showDataset(new SparkDataset(dataset.getSparkDataset().limit(50), roles));
        } else {
            displayData.putText(o.toString());
        }
        return o;
    }

    private static void showDataset(Dataset dataset) {
        displayData.putHTML(DatasetUtils.datasetToDisplay(dataset));
    }

    public static Object showMetadata(Object o) {
        if (o instanceof Dataset) {
            displayData.putHTML(DatasetUtils.datasetMetadataToDisplay((Dataset) o));
        } else {
            displayData.putText(o.toString());
        }
        return o;
    }

    public static Dataset loadSDMXEmptySource(String path, String id) {
        Structured.DataStructure structure = TrevasSDMXUtils.buildStructureFromSDMX3(path, id);
        return new InMemoryDataset(List.of(List.of()), structure);
    }

    public static Dataset loadSDMXSource(String path, String id, String dataPath) {
        Structured.DataStructure structure = TrevasSDMXUtils.buildStructureFromSDMX3(path, id);
        return new SparkDataset(
                spark.read()
                        .option("header", "true")
                        .option("delimiter", ";")
                        .option("quote", "\"")
                        .csv(dataPath),
                structure);
    }
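loadSDMXSource pairs an SDMX 3.0 structure file with a semicolon-delimited, quoted CSV whose header row names the structure's components. Schematically, using the test resources exercised later in this diff (the wrapper class is illustrative):

import fr.insee.trevas.jupyter.VtlKernel;
import fr.insee.vtl.model.Dataset;

public class LoadSdmxSketch {
    public static void main(String[] args) throws Exception {
        new VtlKernel();
        Dataset ds =
                VtlKernel.loadSDMXSource(
                        "src/test/resources/sdmx/DSD_BPE_CENSUS.xml", // structure file
                        "BPE_DETAIL_VTL", // structure id declared in the DSD
                        "src/test/resources/sdmx/BPE_DETAIL_SAMPLE.csv"); // ';'-delimited data
        System.out.println(ds.getDataStructure().size()); // 6, per the test below
    }
}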
    public static void runSDMXPreview(String path) {
        ReadableDataLocation rdl = new ReadableDataLocationTmp(path);

        SDMXVTLWorkflow sdmxVtlWorkflow = new SDMXVTLWorkflow(engine, rdl, Map.of());

        Map<String, Dataset> emptyDatasets = sdmxVtlWorkflow.getEmptyDatasets();
        engine.getBindings(ScriptContext.ENGINE_SCOPE).putAll(emptyDatasets);

        Map<String, PersistentDataset> results = sdmxVtlWorkflow.run();

        var result = new StringBuilder();

        results.forEach(
                (k, v) -> {
                    // Heading markup is an assumed reconstruction; the original
                    // tags were lost in extraction.
                    result.append("<h2>")
                            .append(k)
                            .append("</h2>")
                            .append(DatasetUtils.datasetMetadataToDisplay(v));
                });

        displayData.putHTML(result.toString());
    }
    public static void runSDMX(String path, String data) {
        String[] dataList = data.split(",");
        if (dataList.length % 2 != 0) {
            throw new IllegalArgumentException("Data params length invalid: " + dataList.length);
        }
        // Alternating (dataset name, CSV path) pairs.
        Map<String, String> dataBindings = new HashMap<>();
        for (int i = 0; i < dataList.length; i = i + 2) {
            dataBindings.put(dataList[i].trim(), dataList[i + 1].trim());
        }
        Map<String, Dataset> inputs =
                dataBindings.entrySet().stream()
                        .collect(
                                Collectors.toMap(
                                        Map.Entry::getKey,
                                        e -> {
                                            Structured.DataStructure structure =
                                                    TrevasSDMXUtils.buildStructureFromSDMX3(
                                                            path, e.getKey());
                                            return new SparkDataset(
                                                    spark.read()
                                                            .option("header", "true")
                                                            .option("delimiter", ";")
                                                            .option("quote", "\"")
                                                            .csv(e.getValue()),
                                                    structure);
                                        }));

        ReadableDataLocation rdl = new ReadableDataLocationTmp(path);
        SDMXVTLWorkflow sdmxVtlWorkflow = new SDMXVTLWorkflow(engine, rdl, inputs);
        Map<String, PersistentDataset> results = sdmxVtlWorkflow.run();

        var result = new StringBuilder();

        results.forEach(
                (k, v) -> {
                    // Heading markup assumed, as in runSDMXPreview.
                    result.append("<h2>")
                            .append(k)
                            .append("</h2>")
                            .append(DatasetUtils.datasetToDisplay(v));
                });

        displayData.putHTML(result.toString());
    }

    public static void getTransformationsVTL(String path) {
        ReadableDataLocation rdl = new ReadableDataLocationTmp(path);
        SDMXVTLWorkflow sdmxVtlWorkflow = new SDMXVTLWorkflow(engine, rdl, Map.of());
        String vtl = sdmxVtlWorkflow.getTransformationsVTL();

        displayData.putText(vtl);
    }

    public static void getRulesetsVTL(String path) {
        ReadableDataLocation rdl = new ReadableDataLocationTmp(path);
        SDMXVTLWorkflow sdmxVtlWorkflow = new SDMXVTLWorkflow(engine, rdl, Map.of());
        String dprs = sdmxVtlWorkflow.getRulesetsVTL();

        displayData.putText(dprs);
    }

    public static void main(String[] args) throws Exception {

        if (args.length < 1) throw new IllegalArgumentException("Missing connection file argument");

        Path connectionFile = Paths.get(args[0]);

        if (!Files.isRegularFile(connectionFile))
            throw new IllegalArgumentException(
                    "Connection file '" + connectionFile + "' isn't a file.");

        String contents = new String(Files.readAllBytes(connectionFile));

        JupyterSocket.JUPYTER_LOGGER.setLevel(Level.WARNING);

        KernelConnectionProperties connProps = KernelConnectionProperties.parse(contents);
        JupyterConnection connection = new JupyterConnection(connProps);

        VtlKernel kernel = new VtlKernel();

        kernel.becomeHandlerForConnection(connection);

        connection.connect();
        connection.waitUntilClose();
    }

    private void registerGlobalMethods() throws NoSuchMethodException {
        this.engine.registerGlobalMethod(
                "loadParquet", VtlKernel.class.getMethod("loadParquet", String.class));
        this.engine.registerGlobalMethod(
                "loadCSV", VtlKernel.class.getMethod("loadCSV", String.class));
        this.engine.registerGlobalMethod(
                "loadSas", VtlKernel.class.getMethod("loadSas", String.class));
        this.engine.registerGlobalMethod(
                "writeParquet",
                VtlKernel.class.getMethod("writeParquet", String.class, Dataset.class));
        this.engine.registerGlobalMethod(
                "writeCSV", VtlKernel.class.getMethod("writeCSV", String.class, Dataset.class));
        this.engine.registerGlobalMethod("show", VtlKernel.class.getMethod("show", Object.class));
        this.engine.registerGlobalMethod(
                "showMetadata", VtlKernel.class.getMethod("showMetadata", Object.class));
        this.engine.registerGlobalMethod(
                "size", VtlKernel.class.getMethod("getSize", Dataset.class));

        // SDMX
        this.engine.registerGlobalMethod(
                "loadSDMXEmptySource",
                VtlKernel.class.getMethod("loadSDMXEmptySource", String.class, String.class));
        this.engine.registerGlobalMethod(
                "loadSDMXSource",
                VtlKernel.class.getMethod(
                        "loadSDMXSource", String.class, String.class, String.class));
        this.engine.registerGlobalMethod(
                "runSDMXPreview", VtlKernel.class.getMethod("runSDMXPreview", String.class));
        this.engine.registerGlobalMethod(
                "runSDMX", VtlKernel.class.getMethod("runSDMX", String.class, String.class));
        this.engine.registerGlobalMethod(
                "getTransformationsVTL",
                VtlKernel.class.getMethod("getTransformationsVTL", String.class));
        this.engine.registerGlobalMethod(
                "getRulesetsVTL", VtlKernel.class.getMethod("getRulesetsVTL", String.class));
    }

    @Override
    public synchronized DisplayData eval(String expr) throws Exception {
        displayData = new DisplayData();
        this.engine.eval(expr);
        return displayData;
    }

    @Override
    public ReplacementOptions complete(String code, int at) {
        return this.autoCompleter.complete(code, at);
    }

    @Override
    public LanguageInfo getLanguageInfo() {
        return this.info;
    }
}
diff --git a/src/test/java/fr/insee/trevas/jupyter/SDMXTest.java b/src/test/java/fr/insee/trevas/jupyter/SDMXTest.java
index 90dd20d..59c4b45 100644
--- a/src/test/java/fr/insee/trevas/jupyter/SDMXTest.java
+++ b/src/test/java/fr/insee/trevas/jupyter/SDMXTest.java
@@ -8,48 +8,48 @@ public class SDMXTest {

    @Test
    public void testLoadSDMXSource() {
        Dataset ds =
                VtlKernel.loadSDMXEmptySource(
                        "src/test/resources/sdmx/DSD_BPE_CENSUS.xml", "BPE_DETAIL_VTL");
        assertThat(ds.getDataStructure().size()).isEqualTo(6);
    }

    @Test
    public void testLoadSDMXSourceWithData() throws Exception {
        new VtlKernel();
        Dataset ds =
                VtlKernel.loadSDMXSource(
                        "src/test/resources/sdmx/DSD_BPE_CENSUS.xml",
                        "BPE_DETAIL_VTL",
                        "src/test/resources/sdmx/BPE_DETAIL_SAMPLE.csv");
        assertThat(ds.getDataStructure().size()).isEqualTo(6);
    }

    @Test
    public void testRunSDMXPreview() throws Exception {
        new VtlKernel();
        VtlKernel.runSDMXPreview("src/test/resources/sdmx/DSD_BPE_CENSUS.xml");
    }

    @Test
    public void testRunSDMX() throws Exception {
        new VtlKernel();
        VtlKernel.runSDMX(
                "src/test/resources/sdmx/DSD_BPE_CENSUS.xml",
                "BPE_DETAIL_VTL,src/test/resources/sdmx/BPE_DETAIL_SAMPLE.csv,"
                        + "LEGAL_POP,src/test/resources/sdmx/LEGAL_POP_NUTS3.csv");
    }

    @Test
    public void testGetTransformationsVTL() throws Exception {
        new VtlKernel();
        VtlKernel.getTransformationsVTL("src/test/resources/sdmx/DSD_BPE_CENSUS.xml");
    }

    @Test
    public void testGetRulesetsVTL() throws Exception {
        new VtlKernel();
        VtlKernel.getRulesetsVTL("src/test/resources/sdmx/DSD_BPE_CENSUS.xml");
    }
}
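Finally, the path a real notebook takes but these tests bypass: eval resets the per-cell DisplayData, hands the cell source to the Trevas engine (where the registered globals such as loadCSV and show are visible as VTL functions), and returns whatever the cell displayed. A hedged sketch of one cell evaluation; the VTL snippet and path are illustrative:

import fr.insee.trevas.jupyter.VtlKernel;
import io.github.spencerpark.jupyter.kernel.display.DisplayData;

public class EvalSketch {
    public static void main(String[] args) throws Exception {
        VtlKernel kernel = new VtlKernel();
        // Mirrors a notebook cell: load a CSV, then render it as an HTML table.
        DisplayData out = kernel.eval("ds := loadCSV(\"/tmp/input.csv\"); show(ds);");
        // 'out' now carries the HTML produced by show() for Jupyter to render.
    }
}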