From 088c83fc1b45e2dc2cf58c07cc5820e47a78c294 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Thu, 5 Jan 2023 08:36:12 -0600
Subject: [PATCH] Update references for 0.16.0 release

---
 README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 49e5e6d5..dd404238 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,7 @@ You can link against this library in your program at the following coordinates:
 ```
 groupId: com.databricks
 artifactId: spark-xml_2.12
-version: 0.15.0
+version: 0.16.0
 ```
 
 ## Using with Spark shell
@@ -22,7 +22,7 @@ version: 0.15.0
 This package can be added to Spark using the `--packages` command line option. For example, to include it when starting the spark shell:
 
 ```
-$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.15.0
+$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.16.0
 ```
 
 ## Features
@@ -397,7 +397,7 @@ Automatically infer schema (data types)
 ```R
 library(SparkR)
 
-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.15.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.16.0"))
 
 df <- read.df("books.xml", source = "xml", rowTag = "book")
 
@@ -409,7 +409,7 @@ You can manually specify schema:
 ```R
 library(SparkR)
 
-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.15.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.16.0"))
 customSchema <- structType(
   structField("_id", "string"),
   structField("author", "string"),