It looks like separatorChar has an issue with separators other than tab. I tried the following:
create external table book_ratings_tab (user_id int, isbn string, book_rating int)
ROW FORMAT serde 'com.bizo.hive.serde.csv.CSVSerde'
with serdeproperties (
"separatorChar" = ";",
"quoteChar" = "'",
"escapeChar" = ""
)
stored as textfile
;
This is what I get:
hive> create external table book_ratings_3(user_id int,isbn string, book_rating int)
> ROW FORMAT serde 'com.bizo.hive.serde.csv.CSVSerde'
> with serdeproperties (
> "separatorChar" = ";",
> "quoteChar" = "'",
> "escapeChar" = ""
> )
> stored as textfile
> ;
MismatchedTokenException(-1!=289)
at org.antlr.runtime.BaseRecognizer.recoverFromMismatchedToken(BaseRecognizer.java:617)
at org.antlr.runtime.BaseRecognizer.match(BaseRecognizer.java:115)
at org.apache.hadoop.hive.ql.parse.HiveParser.keyValueProperty(HiveParser.java:26975)
at org.apache.hadoop.hive.ql.parse.HiveParser.tablePropertiesList(HiveParser.java:26745)
at org.apache.hadoop.hive.ql.parse.HiveParser.tableProperties(HiveParser.java:26608)
at org.apache.hadoop.hive.ql.parse.HiveParser.rowFormatSerde(HiveParser.java:25984)
at org.apache.hadoop.hive.ql.parse.HiveParser.tableRowFormat(HiveParser.java:26449)
at org.apache.hadoop.hive.ql.parse.HiveParser.createTableStatement(HiveParser.java:4513)
at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:2064)
at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1330)
at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:970)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:190)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:434)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:352)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:995)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1038)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:931)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:921)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:268)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:220)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:422)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:790)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:684)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:623)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.RunJar.main(RunJar.java:212)
FAILED: ParseException line 4:22 mismatched input '' expecting StringLiteral near '=' in specifying key/value property
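For what it's worth, the failure looks like a Hive CLI quirk rather than a SerDe bug. The CLI splits input on every semicolon, even one inside a quoted string, so the statement handed to the parser is cut off right after "separatorChar" = ". That matches the reported error: the parser hits end of input ('') where it expected a StringLiteral after the '='.

Here is a sketch of a possible workaround, assuming the CLI splitting is the cause. The escaped separator ("\;") and the non-empty escapeChar are untested suggestions on this version, not confirmed fixes:

create external table book_ratings_3 (user_id int, isbn string, book_rating int)
ROW FORMAT serde 'com.bizo.hive.serde.csv.CSVSerde'
with serdeproperties (
  -- escape the semicolon so the CLI does not treat it as a statement terminator
  "separatorChar" = "\;",
  "quoteChar" = "'",
  -- give escapeChar an actual character; the underlying OpenCSV parser
  -- expects a single char, so an empty string here is also suspect
  "escapeChar" = "\\"
)
stored as textfile;

If the backslash escape does not help, some people report using the octal code for semicolon ("\073") instead, though I have not verified that on this Hive version.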