[hadoop@hadoop000 bin]$ ./spark-sql --master local --jars /home/hadoop/software/mysql-connector-java-5.1.27-bin.jar --driver-class-path /home/hadoop/software/mysql-connector-java-5.1.27-bin.jar
21/06/24 10:52:45 DEBUG metastore.HiveMetaStore: admin role already exists
InvalidObjectException(message:Role admin already exists.)
at org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3223)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
at com.sun.proxy.$Proxy7.addRole(Unknown Source)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:656)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:645)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:462)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:133)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
21/06/24 10:52:45 INFO metastore.HiveMetaStore: Added admin role in metastore
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 (optimistic=false)
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Open transaction: count = 1, isActive = true at:
org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3220)
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Open transaction: count = 2, isActive = true at:
org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3527)
21/06/24 10:52:45 DEBUG DataNucleus.Query: Query "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1FetchPlan [default]" of language "JDOQL" has been run before so reusing existing generic compilation
21/06/24 10:52:45 DEBUG DataNucleus.Query: Query "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1FetchPlan [default]" of language "JDOQL" for datastore "rdbms-mysql" has been run before so reusing existing datastore compilation
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@3e7c4815" opened with isolation level "read-committed" and auto-commit=false
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@27df0f3d, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@27df0f3d is starting for transaction Xid= with flags 0
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@44550792 [conn=com.jolbox.bonecp.ConnectionHandle@3e7c4815, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@dffa30b]
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Executing "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1" ...
21/06/24 10:52:45 DEBUG DataNucleus.Datastore: Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@474c9131"
21/06/24 10:52:45 DEBUG Datastore.Native: SELECT 'org.apache.hadoop.hive.metastore.model.MRole' AS NUCLEUS_TYPE,`A0`.`CREATE_TIME`,`A0`.`OWNER_NAME`,`A0`.`ROLE_NAME`,`A0`.`ROLE_ID` FROM `ROLES` `A0` WHERE `A0`.`ROLE_NAME` = <'public'>
21/06/24 10:52:45 DEBUG Datastore.Retrieve: Execution Time = 1 ms
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Execution Time = 1 ms
21/06/24 10:52:45 DEBUG DataNucleus.Persistence: Retrieved object with OID "2[OID]org.apache.hadoop.hive.metastore.model.MRole"
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object with id "2[OID]org.apache.hadoop.hive.metastore.model.MRole" not found in Level 1 cache [cache size = 0]
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object "org.apache.hadoop.hive.metastore.model.MRole@66213a0d" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") added to Level 1 cache (loadedFlags="[NNN]")
21/06/24 10:52:45 DEBUG DataNucleus.Lifecycle: Object "org.apache.hadoop.hive.metastore.model.MRole@66213a0d" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "HOLLOW"->"P_CLEAN"
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Object "org.apache.hadoop.hive.metastore.model.MRole@66213a0d" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") enlisted in transactional cache
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Commit transaction: count = 1, isactive true at:
org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3533)
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Rollback transaction, isActive: true at:
org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3233)
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction rolling back for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53
21/06/24 10:52:45 DEBUG DataNucleus.Lifecycle: Object "org.apache.hadoop.hive.metastore.model.MRole@66213a0d" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "P_CLEAN"->"HOLLOW"
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Object "org.apache.hadoop.hive.metastore.model.MRole@66213a0d" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") being evicted from transactional cache
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Rolling back [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@27df0f3d]]
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@27df0f3d is rolling back for transaction Xid=
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@27df0f3d rolled back connection for transaction Xid=
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@3e7c4815" closed
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@44550792 [conn=com.jolbox.bonecp.ConnectionHandle@3e7c4815, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@dffa30b]
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction rolled back in 1 ms
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object with id="2[OID]org.apache.hadoop.hive.metastore.model.MRole" being removed from Level 1 cache [current cache size = 1]
21/06/24 10:52:45 DEBUG metastore.HiveMetaStore: public role already exists
InvalidObjectException(message:Role public already exists.)
at org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3223)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
at com.sun.proxy.$Proxy7.addRole(Unknown Source)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:665)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:645)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:462)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:133)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
21/06/24 10:52:45 INFO metastore.HiveMetaStore: Added public role in metastore
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 (optimistic=false)
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Open transaction: count = 1, isActive = true at:
org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:3912)
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Open transaction: count = 2, isActive = true at:
org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3527)
21/06/24 10:52:45 DEBUG DataNucleus.Query: Query "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1FetchPlan [default]" of language "JDOQL" has been run before so reusing existing generic compilation
21/06/24 10:52:45 DEBUG DataNucleus.Query: Query "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1FetchPlan [default]" of language "JDOQL" for datastore "rdbms-mysql" has been run before so reusing existing datastore compilation
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@20de05e5" opened with isolation level "read-committed" and auto-commit=false
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@36c0d0bd, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@36c0d0bd is starting for transaction Xid= with flags 0
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@688d411b [conn=com.jolbox.bonecp.ConnectionHandle@20de05e5, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@dffa30b]
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Executing "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1" ...
21/06/24 10:52:45 DEBUG DataNucleus.Datastore: Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@859ea42"
21/06/24 10:52:45 DEBUG Datastore.Native: SELECT 'org.apache.hadoop.hive.metastore.model.MRole' AS NUCLEUS_TYPE,`A0`.`CREATE_TIME`,`A0`.`OWNER_NAME`,`A0`.`ROLE_NAME`,`A0`.`ROLE_ID` FROM `ROLES` `A0` WHERE `A0`.`ROLE_NAME` = <'admin'>
21/06/24 10:52:45 DEBUG Datastore.Retrieve: Execution Time = 0 ms
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Execution Time = 0 ms
21/06/24 10:52:45 DEBUG DataNucleus.Persistence: Retrieved object with OID "1[OID]org.apache.hadoop.hive.metastore.model.MRole"
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object with id "1[OID]org.apache.hadoop.hive.metastore.model.MRole" not found in Level 1 cache [cache size = 0]
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object "org.apache.hadoop.hive.metastore.model.MRole@28737371" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") added to Level 1 cache (loadedFlags="[NNN]")
21/06/24 10:52:45 DEBUG DataNucleus.Lifecycle: Object "org.apache.hadoop.hive.metastore.model.MRole@28737371" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "HOLLOW"->"P_CLEAN"
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Object "org.apache.hadoop.hive.metastore.model.MRole@28737371" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") enlisted in transactional cache
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Commit transaction: count = 1, isactive true at:
org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3533)
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Open transaction: count = 2, isActive = true at:
org.apache.hadoop.hive.metastore.ObjectStore.listPrincipalGlobalGrants(ObjectStore.java:4397)
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Compiling "SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2"
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Compile Time = 1 ms
21/06/24 10:52:45 DEBUG DataNucleus.Query: QueryCompilation:
[filter:DyadicExpression{DyadicExpression{PrimaryExpression{principalName} = ParameterExpression{t1}} AND DyadicExpression{PrimaryExpression{principalType} = ParameterExpression{t2}}}]
[symbols: this type=org.apache.hadoop.hive.metastore.model.MGlobalPrivilege, t1 type=java.lang.String, t2 type=java.lang.String]
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Compiling "SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2" for datastore
21/06/24 10:52:45 DEBUG DataNucleus.Persistence: Managing Persistence of Class : org.apache.hadoop.hive.metastore.model.MGlobalPrivilege [Table : `GLOBAL_PRIVS`, InheritanceStrategy : new-table]
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`USER_GRANT_ID`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Table `GLOBAL_PRIVS` will manage the persistence of the fields for class org.apache.hadoop.hive.metastore.model.MGlobalPrivilege (inheritance strategy="new-table")
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`CREATE_TIME`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.createTime] -> Column(s) [`GLOBAL_PRIVS`.`CREATE_TIME`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`GRANT_OPTION`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.grantOption] -> Column(s) [`GLOBAL_PRIVS`.`GRANT_OPTION`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.BooleanMapping" (org.datanucleus.store.rdbms.mapping.datastore.SmallIntRDBMSMapping)
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`GRANTOR`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.grantor] -> Column(s) [`GLOBAL_PRIVS`.`GRANTOR`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`GRANTOR_TYPE`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.grantorType] -> Column(s) [`GLOBAL_PRIVS`.`GRANTOR_TYPE`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`PRINCIPAL_NAME`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.principalName] -> Column(s) [`GLOBAL_PRIVS`.`PRINCIPAL_NAME`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`PRINCIPAL_TYPE`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.principalType] -> Column(s) [`GLOBAL_PRIVS`.`PRINCIPAL_TYPE`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:45 DEBUG Datastore.Schema: Column "`GLOBAL_PRIVS`.`USER_PRIV`" added to internal representation of table.
21/06/24 10:52:45 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.privilege] -> Column(s) [`GLOBAL_PRIVS`.`USER_PRIV`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:45 DEBUG Datastore.Schema: Table/View `GLOBAL_PRIVS` has been initialised
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@3a08078c" opened with isolation level "serializable" and auto-commit=false
21/06/24 10:52:45 DEBUG Datastore.Schema: Schema Transaction started with connection "com.jolbox.bonecp.ConnectionHandle@3a08078c" with isolation "serializable"
21/06/24 10:52:45 DEBUG Datastore.Schema: Check of existence of `GLOBAL_PRIVS` returned table type of TABLE
21/06/24 10:52:45 DEBUG Datastore.Schema: Loading column info for table(s) "GLOBAL_PRIVS, PARTITION_PARAMS, SERDE_PARAMS, TABLE_PARAMS, PART_COL_STATS, SKEWED_COL_VALUE_LOC_MAP, SKEWED_STRING_LIST_VALUES, TBLS, CDS, DBS, DATABASE_PARAMS, SD_PARAMS, PARTITIONS, SDS, SERDES, COLUMNS_V2, SORT_COLS, PARTITION_KEYS, SKEWED_VALUES, PARTITION_KEY_VALS, TAB_COL_STATS, ROLES, SKEWED_STRING_LIST, BUCKETING_COLS, VERSION, SKEWED_COL_NAMES" in Catalog "", Schema ""
21/06/24 10:52:45 DEBUG Datastore.Schema: Column info loaded for Catalog "", Schema "", 26 tables, time = 27 ms
21/06/24 10:52:45 DEBUG Datastore.Schema: Column info retrieved for table "GLOBAL_PRIVS" : 8 columns found
21/06/24 10:52:45 DEBUG Datastore.Schema: Validating 2 index(es) for table `GLOBAL_PRIVS`
21/06/24 10:52:45 DEBUG Datastore.Schema: Validating 0 foreign key(s) for table `GLOBAL_PRIVS`
21/06/24 10:52:45 DEBUG Datastore.Schema: Validating 2 unique key(s) for table `GLOBAL_PRIVS`
21/06/24 10:52:45 DEBUG Datastore.Schema: Schema Transaction committing with connection "com.jolbox.bonecp.ConnectionHandle@3a08078c"
21/06/24 10:52:45 DEBUG Datastore.Schema: Schema Transaction closing with connection "com.jolbox.bonecp.ConnectionHandle@3a08078c"
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@3a08078c" non enlisted to a transaction is being committed.
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@3a08078c" closed
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Compile Time for datastore = 34 ms
21/06/24 10:52:45 DEBUG DataNucleus.Query: SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2 Query compiled to datastore query "SELECT 'org.apache.hadoop.hive.metastore.model.MGlobalPrivilege' AS NUCLEUS_TYPE,`A0`.`CREATE_TIME`,`A0`.`GRANT_OPTION`,`A0`.`GRANTOR`,`A0`.`GRANTOR_TYPE`,`A0`.`PRINCIPAL_NAME`,`A0`.`PRINCIPAL_TYPE`,`A0`.`USER_PRIV`,`A0`.`USER_GRANT_ID` FROM `GLOBAL_PRIVS` `A0` WHERE `A0`.`PRINCIPAL_NAME` = ? AND `A0`.`PRINCIPAL_TYPE` = ?"
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection found in the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@688d411b [conn=com.jolbox.bonecp.ConnectionHandle@20de05e5, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@dffa30b]
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Executing "SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2" ...
21/06/24 10:52:45 DEBUG DataNucleus.Datastore: Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@5d425813"
21/06/24 10:52:45 DEBUG Datastore.Native: SELECT 'org.apache.hadoop.hive.metastore.model.MGlobalPrivilege' AS NUCLEUS_TYPE,`A0`.`CREATE_TIME`,`A0`.`GRANT_OPTION`,`A0`.`GRANTOR`,`A0`.`GRANTOR_TYPE`,`A0`.`PRINCIPAL_NAME`,`A0`.`PRINCIPAL_TYPE`,`A0`.`USER_PRIV`,`A0`.`USER_GRANT_ID` FROM `GLOBAL_PRIVS` `A0` WHERE `A0`.`PRINCIPAL_NAME` = <'admin'> AND `A0`.`PRINCIPAL_TYPE` = <'ROLE'>
21/06/24 10:52:45 DEBUG Datastore.Retrieve: Execution Time = 0 ms
21/06/24 10:52:45 DEBUG DataNucleus.Query: JDOQL Query : Execution Time = 1 ms
21/06/24 10:52:45 DEBUG DataNucleus.Persistence: Retrieved object with OID "1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege"
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object with id "1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege" not found in Level 1 cache [cache size = 1]
21/06/24 10:52:45 DEBUG DataNucleus.MetaData: Listener found initialisation for persistable class org.apache.hadoop.hive.metastore.model.MGlobalPrivilege
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@6a937336" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") added to Level 1 cache (loadedFlags="[NNNNNNN]")
21/06/24 10:52:45 DEBUG DataNucleus.Lifecycle: Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@6a937336" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") has a lifecycle change : "HOLLOW"->"P_CLEAN"
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@6a937336" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") enlisted in transactional cache
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Commit transaction: count = 1, isactive true at:
org.apache.hadoop.hive.metastore.ObjectStore.listPrincipalGlobalGrants(ObjectStore.java:4407)
21/06/24 10:52:45 DEBUG metastore.ObjectStore: Rollback transaction, isActive: true at:
org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:4115)
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction rolling back for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53
21/06/24 10:52:45 DEBUG DataNucleus.Lifecycle: Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@6a937336" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") has a lifecycle change : "P_CLEAN"->"HOLLOW"
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@6a937336" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") being evicted from transactional cache
21/06/24 10:52:45 DEBUG DataNucleus.Lifecycle: Object "org.apache.hadoop.hive.metastore.model.MRole@28737371" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "P_CLEAN"->"HOLLOW"
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Object "org.apache.hadoop.hive.metastore.model.MRole@28737371" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") being evicted from transactional cache
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Rolling back [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@36c0d0bd]]
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@36c0d0bd is rolling back for transaction Xid=
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@36c0d0bd rolled back connection for transaction Xid=
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@20de05e5" closed
21/06/24 10:52:45 DEBUG DataNucleus.Connection: Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@688d411b [conn=com.jolbox.bonecp.ConnectionHandle@20de05e5, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@dffa30b]
21/06/24 10:52:45 DEBUG DataNucleus.Transaction: Transaction rolled back in 1 ms
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object with id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege" being removed from Level 1 cache [current cache size = 2]
21/06/24 10:52:45 DEBUG DataNucleus.Cache: Object with id="1[OID]org.apache.hadoop.hive.metastore.model.MRole" being removed from Level 1 cache [current cache size = 1]
21/06/24 10:52:45 DEBUG metastore.HiveMetaStore: Failed while granting global privs to admin
InvalidObjectException(message:All is already granted by admin)
at org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:3948)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
at com.sun.proxy.$Proxy7.grantPrivileges(Unknown Source)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:679)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:645)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:462)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:133)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
21/06/24 10:52:45 INFO metastore.HiveMetaStore: No user is added in admin role, since config is empty
21/06/24 10:52:46 INFO metastore.HiveMetaStore: 0: get_all_databases
21/06/24 10:52:46 INFO HiveMetaStore.audit: ugi=hadoop ip=unknown-ip-addr cmd=get_all_databases
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 (optimistic=false)
21/06/24 10:52:46 DEBUG metastore.ObjectStore: Open transaction: count = 1, isActive = true at:
org.apache.hadoop.hive.metastore.ObjectStore.getDatabases(ObjectStore.java:677)
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Single-String with "select name from org.apache.hadoop.hive.metastore.model.MDatabase where ( name.matches("(?i)..*"))"
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compiling "SELECT name FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE ( name.matches("(?i)..*")) ORDER BY name ascending"
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compile Time = 1 ms
21/06/24 10:52:46 DEBUG DataNucleus.Query: QueryCompilation:
[result:PrimaryExpression{name}]
[filter:InvokeExpression{[PrimaryExpression{name}].matches(Literal{(?i)..*})}]
[ordering:OrderExpression{PrimaryExpression{name} ascendingnull}]
[symbols: this type=org.apache.hadoop.hive.metastore.model.MDatabase]
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compiling "SELECT name FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE ( name.matches("(?i)..*")) ORDER BY name ascending" for datastore
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compile Time for datastore = 5 ms
21/06/24 10:52:46 DEBUG DataNucleus.Query: SELECT name FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE ( name.matches("(?i)..*")) ORDER BY name ascending Query compiled to datastore query "SELECT `A0`.`NAME` AS NUCORDER0 FROM `DBS` `A0` WHERE LOWER(`A0`.`NAME`) LIKE '_%' ESCAPE '\\' ORDER BY NUCORDER0"
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@5633ed82" opened with isolation level "read-committed" and auto-commit=false
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@767191b1, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@767191b1 is starting for transaction Xid= with flags 0
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@5d21202d [conn=com.jolbox.bonecp.ConnectionHandle@5633ed82, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@dffa30b]
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Executing "SELECT name FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE ( name.matches("(?i)..*")) ORDER BY name ascending" ...
21/06/24 10:52:46 DEBUG DataNucleus.Datastore: Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@6eb17ec8"
21/06/24 10:52:46 DEBUG Datastore.Native: SELECT `A0`.`NAME` AS NUCORDER0 FROM `DBS` `A0` WHERE LOWER(`A0`.`NAME`) LIKE '_%' ESCAPE '\\' ORDER BY NUCORDER0
21/06/24 10:52:46 DEBUG Datastore.Retrieve: Execution Time = 0 ms
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Execution Time = 0 ms
21/06/24 10:52:46 DEBUG metastore.ObjectStore: Commit transaction: count = 0, isactive true at:
org.apache.hadoop.hive.metastore.ObjectStore.getDatabases(ObjectStore.java:701)
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Transaction committing for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53
21/06/24 10:52:46 DEBUG DataNucleus.Persistence: ExecutionContext.internalFlush() process started using ordered flush - 0 enlisted objects
21/06/24 10:52:46 DEBUG DataNucleus.Persistence: ExecutionContext.internalFlush() process finished
21/06/24 10:52:46 DEBUG DataNucleus.Persistence: Performing check of objects for "persistence-by-reachability" (commit) ...
21/06/24 10:52:46 DEBUG DataNucleus.Persistence: Completed check of objects for "persistence-by-reachability" (commit).
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Committing [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@767191b1]]
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@767191b1 is committing for transaction Xid= with onePhase=true
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@767191b1 committed connection for transaction Xid= with onePhase=true
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@5633ed82" closed
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@5d21202d [conn=com.jolbox.bonecp.ConnectionHandle@5633ed82, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@dffa30b]
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Transaction committed in 1 ms
21/06/24 10:52:46 INFO metastore.HiveMetaStore: 0: get_functions: db=default pat=*
21/06/24 10:52:46 INFO HiveMetaStore.audit: ugi=hadoop ip=unknown-ip-addr cmd=get_functions: db=default pat=*
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Transaction created [DataNucleus Transaction, ID=Xid, enlisted resources=[]]
21/06/24 10:52:46 DEBUG DataNucleus.Transaction: Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@2b0b4d53 (optimistic=false)
21/06/24 10:52:46 DEBUG metastore.ObjectStore: Open transaction: count = 1, isActive = true at:
org.apache.hadoop.hive.metastore.ObjectStore.getFunctions(ObjectStore.java:7013)
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Single-String with "select functionName from org.apache.hadoop.hive.metastore.model.MFunction where database.name == dbName && ( functionName.matches("(?i).*"))"
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compiling "SELECT functionName FROM org.apache.hadoop.hive.metastore.model.MFunction WHERE database.name == dbName && ( functionName.matches("(?i).*")) PARAMETERS java.lang.String dbName ORDER BY functionName ascending"
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compile Time = 0 ms
21/06/24 10:52:46 DEBUG DataNucleus.Query: QueryCompilation:
[result:PrimaryExpression{functionName}]
[filter:DyadicExpression{DyadicExpression{PrimaryExpression{database.name} = ParameterExpression{dbName}} AND InvokeExpression{[PrimaryExpression{functionName}].matches(Literal{(?i).*})}}]
[ordering:OrderExpression{PrimaryExpression{functionName} ascendingnull}]
[symbols: dbName type=java.lang.String, this type=org.apache.hadoop.hive.metastore.model.MFunction]
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compiling "SELECT functionName FROM org.apache.hadoop.hive.metastore.model.MFunction WHERE database.name == dbName && ( functionName.matches("(?i).*")) PARAMETERS java.lang.String dbName ORDER BY functionName ascending" for datastore
21/06/24 10:52:46 INFO DataNucleus.Datastore: The class "org.apache.hadoop.hive.metastore.model.MResourceUri" is tagged as "embedded-only" so does not have its own datastore table.
21/06/24 10:52:46 DEBUG DataNucleus.Persistence: Managing Persistence of Class : org.apache.hadoop.hive.metastore.model.MFunction [Table : `FUNCS`, InheritanceStrategy : new-table]
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`FUNC_ID`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Table `FUNCS` will manage the persistence of the fields for class org.apache.hadoop.hive.metastore.model.MFunction (inheritance strategy="new-table")
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`CLASS_NAME`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.className] -> Column(s) [`FUNCS`.`CLASS_NAME`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`CREATE_TIME`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.createTime] -> Column(s) [`FUNCS`.`CREATE_TIME`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`DB_ID`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.database] -> Column(s) [`FUNCS`.`DB_ID`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.PersistableMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`FUNC_NAME`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.functionName] -> Column(s) [`FUNCS`.`FUNC_NAME`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`FUNC_TYPE`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.functionType] -> Column(s) [`FUNCS`.`FUNC_TYPE`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`OWNER_NAME`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.ownerName] -> Column(s) [`FUNCS`.`OWNER_NAME`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNCS`.`OWNER_TYPE`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.ownerType] -> Column(s) [`FUNCS`.`OWNER_TYPE`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:46 DEBUG DataNucleus.Persistence: Managing Persistence of Field : org.apache.hadoop.hive.metastore.model.MFunction.resourceUris [Table : `FUNC_RU`]
21/06/24 10:52:46 DEBUG Datastore.Schema: Field [org.apache.hadoop.hive.metastore.model.MFunction.resourceUris] -> Column(s) [[none]] using mapping of type "org.datanucleus.store.rdbms.mapping.java.CollectionMapping" ()
21/06/24 10:52:46 DEBUG Datastore.Schema: Table/View `FUNCS` has been initialised
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNC_RU`.`FUNC_ID`" added to internal representation of table.
21/06/24 10:52:46 DEBUG DataNucleus.Datastore: Field [org.apache.hadoop.hive.metastore.model.MFunction.resourceUris] -> Column(s) [`FUNC_RU`.`FUNC_ID`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.PersistableMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNC_RU`.`RESOURCE_TYPE`" added to internal representation of table.
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNC_RU`.`RESOURCE_URI`" added to internal representation of table.
21/06/24 10:52:46 DEBUG DataNucleus.Datastore: Field [org.apache.hadoop.hive.metastore.model.MFunction.resourceUris] -> Column(s) [`FUNC_RU`.`RESOURCE_TYPE`,`FUNC_RU`.`RESOURCE_URI`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.EmbeddedElementPCMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping,org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Column "`FUNC_RU`.`INTEGER_IDX`" added to internal representation of table.
21/06/24 10:52:46 DEBUG DataNucleus.Datastore: Field [org.apache.hadoop.hive.metastore.model.MFunction.resourceUris] -> Column(s) [`FUNC_RU`.`INTEGER_IDX`] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
21/06/24 10:52:46 DEBUG Datastore.Schema: Table/View `FUNC_RU` has been initialised
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@5c77ba8f" opened with isolation level "serializable" and auto-commit=false
21/06/24 10:52:46 DEBUG Datastore.Schema: Schema Transaction started with connection "com.jolbox.bonecp.ConnectionHandle@5c77ba8f" with isolation "serializable"
21/06/24 10:52:46 DEBUG Datastore.Schema: Check of existence of `FUNCS` returned table type of TABLE
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@5c77ba8f" non enlisted to a transaction is being committed.
21/06/24 10:52:46 DEBUG DataNucleus.Connection: Connection "com.jolbox.bonecp.ConnectionHandle@5c77ba8f" closed
21/06/24 10:52:46 DEBUG DataNucleus.Query: JDOQL Query : Compile Time for datastore = 32 ms
java.net.ConnectException: Connection refused
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:648)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:744)
at org.apache.hadoop.ipc.Client$Connection.access$3000(Client.java:396)
at org.apache.hadoop.ipc.Client.getConnection(Client.java:1557)
at org.apache.hadoop.ipc.Client.call(Client.java:1480)
at org.apache.hadoop.ipc.Client.call(Client.java:1441)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at com.sun.proxy.$Proxy16.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:788)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:258)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
at com.sun.proxy.$Proxy17.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:2168)
at org.apache.hadoop.hdfs.DistributedFileSystem$20.doCall(DistributedFileSystem.java:1266)
at org.apache.hadoop.hdfs.DistributedFileSystem$20.doCall(DistributedFileSystem.java:1262)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1262)
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1418)
at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:596)
at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:133)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
21/06/24 10:52:46 DEBUG ipc.Client: IPC Client (1933224408) connection to hadoop000/192.168.150.5:8020 from hadoop: closed
Exception in thread "main" java.lang.RuntimeException: java.net.ConnectException: Call From hadoop000/192.168.150.5 to hadoop000:8020 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:133)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.net.ConnectException: Call From hadoop000/192.168.150.5 to hadoop000:8020 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:791)
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:731)
at org.apache.hadoop.ipc.Client.call(Client.java:1508)
at org.apache.hadoop.ipc.Client.call(Client.java:1441)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at com.sun.proxy.$Proxy16.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:788)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:258)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
at com.sun.proxy.$Proxy17.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:2168)
at org.apache.hadoop.hdfs.DistributedFileSystem$20.doCall(DistributedFileSystem.java:1266)
at org.apache.hadoop.hdfs.DistributedFileSystem$20.doCall(DistributedFileSystem.java:1262)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1262)
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1418)
at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:596)
at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
... 14 more
Caused by: java.net.ConnectException: Connection refused
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:648)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:744)
at org.apache.hadoop.ipc.Client$Connection.access$3000(Client.java:396)
at org.apache.hadoop.ipc.Client.getConnection(Client.java:1557)
at org.apache.hadoop.ipc.Client.call(Client.java:1480)
... 34 more
21/06/24 10:52:46 INFO util.ShutdownHookManager: Shutdown hook called
21/06/24 10:52:46 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-0a2991bd-99b8-4981-b2a6-bd4b68c5cfa1
21/06/24 10:52:46 DEBUG ipc.Client: stopping client from cache: org.apache.hadoop.ipc.Client@1c26273d
21/06/24 10:52:46 DEBUG ipc.Client: removing client from cache: org.apache.hadoop.ipc.Client@1c26273d
21/06/24 10:52:46 DEBUG ipc.Client: stopping actual client because no more references remain: org.apache.hadoop.ipc.Client@1c26273d
21/06/24 10:52:46 DEBUG ipc.Client: Stopping client
[hadoop@hadoop000 bin]$
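
The launch fails at SessionState.createRootHDFSDir with "java.net.ConnectException: Call From hadoop000/192.168.150.5 to hadoop000:8020 failed on connection exception: Connection refused": nothing is answering on the NameNode RPC port 8020, which usually means HDFS is not running (or fs.defaultFS points at the wrong host/port). A minimal diagnostic sketch, not part of the captured session, assuming a standard Hadoop 2.x install with HADOOP_HOME set:

$ jps                              # is a NameNode process listed?
$ netstat -tlnp | grep 8020        # is anything listening on the RPC port?
$ $HADOOP_HOME/sbin/start-dfs.sh   # start HDFS if it is down
$ hdfs dfsadmin -report            # confirm the NameNode responds before re-running spark-sql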