org.apache.spark.SparkException: Exception thrown in awaitResult:
at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:227)
at org.apache.spark.deploy.yarn.ApplicationMaster.runDriver(ApplicationMaster.scala:471)
at org.apache.spark.deploy.yarn.ApplicationMaster.org$apache$spark$deploy$yarn$ApplicationMaster$$runImpl(ApplicationMaster.scala:307)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$run$1.apply$mcV$sp(ApplicationMaster.scala:247)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$run$1.apply(ApplicationMaster.scala:247)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$run$1.apply(ApplicationMaster.scala:247)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$3.run(ApplicationMaster.scala:802)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1761)
at org.apache.spark.deploy.yarn.ApplicationMaster.doAsUser(ApplicationMaster.scala:801)
at org.apache.spark.deploy.yarn.ApplicationMaster.run(ApplicationMaster.scala:246)
at org.apache.spark.deploy.yarn.ApplicationMaster$.main(ApplicationMaster.scala:828)
at org.apache.spark.deploy.yarn.ApplicationMaster.main(ApplicationMaster.scala)
Caused by: java.util.concurrent.ExecutionException: Boxed Error
at scala.concurrent.impl.Promise$.resolver(Promise.scala:59)
at scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:51)
at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:248)
at scala.concurrent.Promise$class.tryFailure(Promise.scala:112)
at scala.concurrent.impl.Promise$DefaultPromise.tryFailure(Promise.scala:157)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:713)
Caused by: java.lang.ExceptionInInitializerError
at com.ck.data.batch.customer.Test.main(Test.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:694)
Caused by: java.lang.RuntimeException: Failed to get driver instance for jdbcUrl=jdbc:mysql://10.255.11.118:3306/scrm?characterEncoding=utf8&useSSL=false&tinyInt1isBit=false
at com.zaxxer.hikari.util.DriverDataSource.<init>(DriverDataSource.java)
at com.zaxxer.hikari.pool.PoolBase.initializeDataSource(PoolBase.java:323)
at com.zaxxer.hikari.pool.PoolBase.<init>(PoolBase.java)
at com.zaxxer.hikari.pool.HikariPool.<init>(HikariPool.java)
at com.zaxxer.hikari.HikariDataSource.<init>(HikariDataSource.java)
at com.cds.common.java.jdbc.DbDataSource.newInstance(DbDataSource.java:52)
at com.cds.common.java.jdbc.DbDataSource.newInstance(DbDataSource.java:18)
at com.cds.common.java.connection.AbstractLazyInitClosableConnection.getInstance(AbstractLazyInitClosableConnection.java:69)
at com.ck.data.common.instance.mysql.DbInstance.getDataSource(DbInstance.java:26)
at com.ck.data.batch.customer.Test$.<init>(Test.scala)
at com.ck.data.batch.customer.Test$.<clinit>(Test.scala)
... 6 more
Caused by: java.sql.SQLException: No suitable driver
at java.sql.DriverManager.getDriver(DriverManager.java:315)
at com.zaxxer.hikari.util.DriverDataSource.<init>(DriverDataSource.java)
... 16 more
The error message is shown above.
import java.util.Properties
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import javax.sql.DataSource
import org.apache.commons.dbutils.QueryRunner
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
object Test {

  // Initialized eagerly when the Test$ object is first loaded; this happens on the
  // driver and again in each executor JVM that touches it, so the MySQL driver must
  // be on the classpath everywhere. If this initializer throws, the JVM wraps it in
  // the ExceptionInInitializerError seen in the trace above.
  val instance: DataSource = getDataSource()

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder().appName("test").getOrCreate()
    val sc = spark.sparkContext
    process(sc)
    sc.stop()
  }

  def process(sc: SparkContext): Unit = {
    sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
      .map(x => "test" + x)
      .foreachPartition(par => {
        // Batch the whole partition into one JDBC call instead of one per record.
        val params: Array[Array[AnyRef]] = par.map(d => Array(d.asInstanceOf[AnyRef])).toArray
        saveData(params)
      })
  }

  def saveData(params: Array[Array[AnyRef]]): Unit = {
    val sql = "INSERT INTO test (text) VALUES (?)"
    val qr: QueryRunner = new QueryRunner(instance)
    qr.batch(sql, params)
  }

  def getDataSource(): HikariDataSource = {
    val properties = new Properties
    properties.put("jdbcUrl", "jdbc:mysql://localhost:3306/db?characterEncoding=utf8&useSSL=false&tinyInt1isBit=false")
    properties.put("username", "root")
    properties.put("password", "root!2022")
    val config = new HikariConfig(properties)
    // No driverClassName is set, so HikariCP falls back to DriverManager.getDriver(jdbcUrl).
    // config.setDriverClassName("com.mysql.jdbc.Driver")
    new HikariDataSource(config)
  }
}
The code is shown above. It does not use Spark SQL, but it does write to MySQL.
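The innermost "Caused by" is the real failure: java.sql.DriverManager.getDriver cannot find a registered driver that accepts the MySQL URL. A minimal sketch of that lookup (plain JDK code, no Spark; it assumes nothing beyond whether the MySQL connector jar is on the visible classpath):

import java.sql.DriverManager

object DriverLookup {
  def main(args: Array[String]): Unit = {
    // DriverManager only sees drivers that have registered themselves, usually via
    // the ServiceLoader entry (META-INF/services/java.sql.Driver) inside the driver jar.
    // If no registered driver accepts the URL, this throws
    // java.sql.SQLException: No suitable driver.
    val driver = DriverManager.getDriver("jdbc:mysql://localhost:3306/db")
    println(driver.getClass.getName) // e.g. com.mysql.jdbc.Driver for Connector/J 5.x
  }
}

HikariCP ends up on this path because no driverClassName was configured: as the trace shows, DriverDataSource falls back to DriverManager.getDriver(jdbcUrl) when the driver class is not named explicitly.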
1. Finding the cause
There are plenty of fixes for this error online, but most of them do not match my situation. For the record, the other fixes are:

- For Spark SQL reads/writes, the driver has to be added manually with .option("driver", "com.mysql.jdbc.Driver") (see the sketch after the spark-submit example below).
- The driver class name is simply mistyped (a stray comma, semicolon, and so on).
- The driver jar is missing: add the MySQL driver to the Spark cluster's jars, or point to it when submitting the job:
  --conf spark.executor.extraClassPath=/opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
  --conf spark.driver.extraClassPath=/opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
- The driver jar conflicts with another copy: pass it explicitly with --jars and --driver-class-path at spark-submit time:

spark-submit \
--master yarn \
--deploy-mode cluster \
--name "test" \
--num-executors 3 \
--executor-cores 1 \
--executor-memory 2G \
--driver-memory 1G \
--jars /opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
--driver-class-path /opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
--conf spark.executor.extraClassPath=/opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
--class com.data.Test \
test-1.0-SNAPSHOT-jar-with-dependencies.jar
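For completeness, a sketch of the first fix above in a Spark SQL write (the DataFrame df and the url/table values are placeholders, not from the original job):

// Hypothetical Spark SQL write; df is assumed to be an existing DataFrame.
df.write
  .format("jdbc")
  .option("url", "jdbc:mysql://localhost:3306/db?useSSL=false")
  .option("dbtable", "test")
  .option("user", "root")
  .option("password", "root!2022")
  .option("driver", "com.mysql.jdbc.Driver") // must be spelled exactly; a typo here reproduces the error
  .mode("append")
  .save()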
What actually solved my problem was adding these two options to spark-submit:
--jars /opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
--conf spark.yarn.user.classpath.first=true \
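Put together, a sketch of a full submit command with this fix (reusing the jar path from the example above; actual paths depend on your environment):

spark-submit \
--master yarn \
--deploy-mode cluster \
--name "test" \
--class com.data.Test \
--jars /opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
--conf spark.yarn.user.classpath.first=true \
test-1.0-SNAPSHOT-jar-with-dependencies.jar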
2. Analysis and summary

My job does not go through Spark SQL, so the .option("driver", ...) advice does not apply: the connection pool is built directly with HikariCP. Because no driverClassName was configured, HikariCP fell back to java.sql.DriverManager.getDriver(jdbcUrl), and no usable MySQL driver was visible at that point. Shipping the connector with --jars only helped once spark.yarn.user.classpath.first=true was also set, which makes YARN place the user-supplied jars ahead of the cluster's classpath; that suggests a conflicting or stale copy of the driver on the cluster classpath was shadowing the one I shipped.
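A related alternative, hinted at by the commented-out line in the code above, is to name the driver class explicitly on the HikariConfig so that Hikari never has to ask DriverManager at all; a minimal sketch:

val config = new HikariConfig(properties)
// Hikari loads this class directly instead of resolving a driver from the
// jdbcUrl via DriverManager, which sidesteps "No suitable driver".
config.setDriverClassName("com.mysql.jdbc.Driver")
val dataSource = new HikariDataSource(config)

The connector jar still has to be on the classpath for the class to load, so this changes the error message rather than removing the need to ship the jar.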