Overview
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:An exception was thrown while adding/validating class(es) : You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '[CHARACTER SET charset_name] [COLLATE collation_name] NULL,
VIEW_ORIGINAL_T' at line 13
java.sql.SQLSyntaxErrorException: You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '[CHARACTER SET charset_name] [COLLATE collation_name] NULL,
VIEW_ORIGINAL_T' at line 13
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:120)
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:97)
at com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(SQLExceptionsMapping.java:122)
at com.mysql.cj.jdbc.StatementImpl.executeInternal(StatementImpl.java:764)
at com.mysql.cj.jdbc.StatementImpl.execute(StatementImpl.java:648)
at com.jolbox.bonecp.StatementHandle.execute(StatementHandle.java:254)
at org.datanucleus.store.rdbms.table.AbstractTable.executeDdlStatement(AbstractTable.java:879)
at org.datanucleus.store.rdbms.table.AbstractTable.executeDdlStatementList(AbstractTable.java:830)
at org.datanucleus.store.rdbms.table.AbstractTable.create(AbstractTable.java:546)
at org.datanucleus.store.rdbms.table.AbstractTable.exists(AbstractTable.java:609)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:3385)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2896)
at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:119)
at org.datanucleus.store.rdbms.RDBMSStoreManager.manageClasses(RDBMSStoreManager.java:1627)
at org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:672)
at org.datanucleus.store.rdbms.RDBMSStoreManager.getPropertiesForGenerator(RDBMSStoreManager.java:2088)
at org.datanucleus.store.AbstractStoreManager.getStrategyValue(AbstractStoreManager.java:1271)
at org.datanucleus.ExecutionContextImpl.newObjectId(ExecutionContextImpl.java:3760)
at org.datanucleus.state.StateManagerImpl.setIdentity(StateManagerImpl.java:2267)
at org.datanucleus.state.StateManagerImpl.initialiseForPersistentNew(StateManagerImpl.java:484)
at org.datanucleus.state.StateManagerImpl.initialiseForPersistentNew(StateManagerImpl.java:120)
at org.datanucleus.state.ObjectProviderFactoryImpl.newForPersistentNew(ObjectProviderFactoryImpl.java:218)
at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2079)
at org.datanucleus.ExecutionContextImpl.persistObjectWork(ExecutionContextImpl.java:1923)
at org.datanucleus.ExecutionContextImpl.persistObject(ExecutionContextImpl.java:1778)
at org.datanucleus.ExecutionContextThreadedImpl.persistObject(ExecutionContextThreadedImpl.java:217)
at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:724)
at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:749)
at org.apache.hadoop.hive.metastore.ObjectStore.createTable(ObjectStore.java:995)
at sun.reflect.GeneratedMethodAccessor29.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:101)
at com.sun.proxy.$Proxy37.createTable(Unknown Source)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_core(HiveMetaStore.java:1457)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_with_environment_context(HiveMetaStore.java:1503)
at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
at com.sun.proxy.$Proxy38.create_table_with_environment_context(Unknown Source)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.create_table_with_environment_context(HiveMetaStoreClient.java:2405)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.create_table_with_environment_context(SessionHiveMetaStoreClient.java:93)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:752)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:740)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173)
at com.sun.proxy.$Proxy39.createTable(Unknown Source)
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:852)
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:867)
at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4356)
at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:354)
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:199)
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2183)
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1839)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1526)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1227)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:233)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:184)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:403)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:821)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:686)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:244)
at org.apache.hadoop.util.RunJar.main(RunJar.java:158)
)
This error is caused by a corrupted metastore database. A typical way it happens: you dropped the hive metastore database in MySQL but never re-initialized it, then went straight back to using the Hive client. The schema that gets auto-generated at that point is incomplete.
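If you want to confirm this is the problem before dropping anything, a quick check (a minimal sketch, assuming the metastore database is named hive and schematool is on the PATH) is to look at what actually exists in MySQL and what schematool reports about it:
mysql> use hive;
mysql> show tables;   -- an incomplete, auto-generated schema is missing most of the expected tables, e.g. VERSION
$ schematool -dbType mysql -info   # prints the recorded schema version, or fails if the VERSION table is missing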
The fix is as follows:
Log in to MySQL and recreate the metastore database:
mysql> drop database hive;
mysql> create database hive;
Then re-initialize the schema from the shell:
$ schematool -initSchema -dbType mysql
This regenerates the complete metastore schema in MySQL (note that dropping the hive database also discards any metadata it contained, so previously created Hive tables will need to be re-created).
Start Hive again and it should work.
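As a final sanity check (again assuming -dbType mysql; the -validate option is only available in newer Hive releases), schematool can report and validate the schema it just created:
$ schematool -dbType mysql -info       # should now print a schema version matching your Hive release
$ schematool -dbType mysql -validate   # cross-checks the metastore tables against the expected schema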