The Spark SQL application itself:

SparkSession spark = SparkSession.builder()
        .enableHiveSupport() // required: without this the session cannot reach the Hive metastore
        .getOrCreate();
// args[0]: database name; args[1]: SQL statement; args[2]: HDFS output directory, e.g. "/task/data"
spark.sql("use " + args[0]);
spark.sql(args[1]).write().format("csv").save(args[2]);
spark.close();
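For context, a minimal self-contained wrapper around the snippet above; the class name is an illustrative assumption (the article's real entry points are com.zte.spark.sql.debug.SqlDebugger and anonymous.main.KLMainSpark):

import org.apache.spark.sql.SparkSession;

public class SqlToCsvJob { // hypothetical name, for illustration only
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .enableHiveSupport()   // needed to resolve Hive tables via the metastore
                .getOrCreate();
        spark.sql("use " + args[0]);                              // switch to the target database
        spark.sql(args[1]).write().format("csv").save(args[2]);  // run the SQL, dump CSV to HDFS
        spark.close();
    }
}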

Submitting the application with SparkLauncher:

final CountDownLatch countDownLatch = new CountDownLatch(1);
HashMap<String, String> env = new HashMap<>();
//env.put("JAVA_HOME", "/usr/java8/jdk");
env.put("HADOOP_CONF_DIR", "/etc/hadoop/conf");
SparkAppHandle handler = new SparkLauncher(env)
        .setSparkHome("/opt/ZDH/parcels/lib/spark")
        .setMaster("yarn")
        .setDeployMode("cluster")
        .setConf(SparkLauncher.EXECUTOR_MEMORY, "4g")
        .setConf(SparkLauncher.EXECUTOR_CORES, "2")
        .setConf("spark.cores.max", "22")
        .setConf("spark.executorEnv.JAVA_HOME", "/usr/java8/jdk")
        .setConf("spark.driverEnv.JAVA_HOME", "/usr/java8/jdk")
        .setConf("spark.yarn.appMasterEnv.JAVA_HOME", "/usr/java8/jdk") // needed in yarn mode
        .setConf("spark.sql.shuffle.partitions", "800")
        // some jars cannot be loaded automatically and must be added to the classpath by hand:
        .setConf("spark.driver.extraClassPath", "/opt/ZDH/parcels/lib/spark/libext/*")
        .setConf("spark.executor.extraClassPath", "/opt/ZDH/parcels/lib/spark/libext/*")
        .addSparkArg("--files", "/etc/spark/conf/hive-site.xml")
        .setAppResource("hdfs://nameservice/task/algorithm/jar/KLTAnonymous-1.0-SNAPSHOT.jar") // TODO
        .setMainClass("anonymous.main.KLMainSpark")
        .addAppArgs("hdfs://nameservice" + algCfgPath)
        .setVerbose(true) // --verbose for debug output
        .startApplication(new SparkAppHandle.Listener() {
            // Watches the job state. Once the application ends (for whatever reason),
            // isFinal() returns true; until then it returns false.
            @Override
            public void stateChanged(SparkAppHandle sparkAppHandle) {
                if (sparkAppHandle.getState().isFinal()) {
                    countDownLatch.countDown();
                    logger.info("SparkJob finished! state =>" + sparkAppHandle.getState().toString());
                    if (SparkAppHandle.State.FINISHED.equals(sparkAppHandle.getState())) {
                        updateTaskStatus(jobId, 2);
                    } else {
                        updateTaskStatus(jobId, 3);
                    }
                }
            }

            @Override
            public void infoChanged(SparkAppHandle handler) {
                logger.info("infoChanged=>" + handler.getState().toString());
            }
        });
while (!"FINISHED".equalsIgnoreCase(handler.getState().toString())
        && SparkAppHandle.State.FAILED != handler.getState()) {
    logger.info("SparkJob executing, applicationId=>" + handler.getAppId() + ", state=>" + handler.getState());
    try {
        Thread.sleep(10000);
    } catch (InterruptedException e) {
        logger.error("Thread sleep error: " + e.getMessage());
    }
}
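Note that countDownLatch is created but never awaited; the polling loop re-implements what the listener already signals. A hedged alternative, assuming the same latch and handler:

try {
    // Block until stateChanged() sees a final state and counts the latch down,
    // instead of polling getState() every 10 seconds:
    countDownLatch.await(); // or await(30, TimeUnit.MINUTES) to bound the wait
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
}
logger.info("SparkJob done, final state => " + handler.getState());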

String localPath = new File("").getAbsolutePath();

String sparkJarsPath = localPath + File.separator + "jars";

String appJarPath = localPath + File.separator + "target";

System.setProperty("HADOOP_USER_NAME", "mr");

System.setProperty("SPARK_YARN_MODE", "true");

SparkConf sparkConf = new SparkConf();

sparkConf.setMaster("yarn");

sparkConf.setAppName("SparkOnYarnTest");

//sparkConf.set("spark.yarn.jars", sparkJarsPath+ File.separator + "*");

sparkConf.set("spark.yarn.jars", "local:///opt/ZDH/parcels/lib/spark/jars/*");

sparkConf.set("spark.submit.deployMode", "cluster");

List argsList = new ArrayList();

argsList.add("--jar");

argsList.add(appJarPath + File.separator + "SparkApplication-1.0-SNAPSHOT.jar");

argsList.add("--class");

argsList.add("com.zte.spark.sql.debug.SqlDebugger");

String[] arr = argsList.toArray(new String[argsList.size()]);

ClientArguments clientArgs = new ClientArguments(arr);

Client client = new Client(clientArgs, sparkConf);

client.run();
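If you need to track the submitted application afterwards, one option is the standard Hadoop YarnClient API, independent of Spark's internal Client class. A hedged sketch (the helper class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.YarnClient;

public class YarnStatusProbe { // hypothetical helper, for illustration
    public static ApplicationReport report(ApplicationId appId) throws Exception {
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new Configuration()); // reads the configs under HADOOP_CONF_DIR
        yarnClient.start();
        try {
            return yarnClient.getApplicationReport(appId); // state, tracking URL, diagnostics
        } finally {
            yarnClient.stop();
        }
    }
}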

Problem analysis:

Problem 1

org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.StandbyException): Operation category WRITE is not supported in state standby

	at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:87)
	at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1800)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1327)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:616)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:401)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2075)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2071)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2069)

This error occurs because the program is deployed on machine 141, which is the standby NameNode; write operations are only served by the active NameNode. A fix sketch follows.
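A hedged sketch of the fix: let the HDFS client fail over to the active NameNode instead of hard-coding one host. The nameservice name matches the hdfs://nameservice URIs used elsewhere in this article; the nn1/nn2 ids and the mapping to the 141/142 addresses are assumptions:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HaHdfsClient { // hypothetical helper, for illustration
    public static FileSystem active() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://nameservice");
        conf.set("dfs.nameservices", "nameservice");
        conf.set("dfs.ha.namenodes.nameservice", "nn1,nn2"); // ids are assumptions
        conf.set("dfs.namenode.rpc-address.nameservice.nn1", "10.43.159.141:9000");
        conf.set("dfs.namenode.rpc-address.nameservice.nn2", "10.43.159.142:9000");
        conf.set("dfs.client.failover.proxy.provider.nameservice",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        return FileSystem.get(conf); // client fails over to whichever NameNode is active
    }
}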

Problem 2

fs = FileSystem.get(URI.create("hdfs://10.43.159.142:9000"), conf); // no user identity supplied

Call From dap141-159/10.43.159.141 to dap142-159:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused

This error occurred because the program, deployed on machine 141, tried to connect to dap142-159:9000 and the connection was refused.

When creating HDFS directories and files from Java with fs = FileSystem.get(URI.create("hdfs://10.43.159.142:9000"), conf), the URI must carry the correct (active) NameNode information.
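A hedged sketch of both fixes together: pass the user identity as the third argument of FileSystem.get, and target the HA nameservice rather than a fixed host (the "mr" user is taken from the article; the target path is illustrative):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MkdirAsUser { // hypothetical helper, for illustration
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // picks up hdfs-site.xml with the HA settings
        // The third argument sets the user identity (equivalent to HADOOP_USER_NAME=mr):
        FileSystem fs = FileSystem.get(URI.create("hdfs://nameservice"), conf, "mr");
        fs.mkdirs(new Path("/task/data")); // illustrative target directory
        fs.close();
    }
}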

Problem 3

SEVERE: Servlet.service() for servlet [REST Service] in context with path [] threw exception [java.lang.UnsupportedClassVersionError: org/apache/spark/launcher/SparkLauncher : Unsupported major.minor version 52.0 (unable to load class org.apache.spark.launcher.SparkLauncher)] with root cause

java.lang.UnsupportedClassVersionError: org/apache/spark/launcher/SparkLauncher : Unsupported major.minor version 52.0 (unable to load class org.apache.spark.launcher.SparkLauncher)

at org.apache.catalina.loader.WebappClassLoaderBase.findClassInternal(WebappClassLoaderBase.java:3209)

at org.apache.catalina.loader.WebappClassLoaderBase.findClass(WebappClassLoaderBase.java:1373)

at org.apache.catalina.loader.WebappClassLoaderBase.loadClass(WebappClassLoaderBase.java:1861)

at org.apache.catalina.loader.WebappClassLoaderBase.loadClass(WebappClassLoaderBase.java:1735)

at com.zte.rest.TestRest.createMaskingPlan(TestRest.java:63)

	at com.zte.rest.TestRest$$FastClassBySpringCGLIB$$19ca2510.invoke()

at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:204)

	at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:700)
	at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:150)
	at org.springframework.transaction.interceptor.TransactionInterceptor$1.proceedWithInvocation(TransactionInterceptor.java:96)

at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:260)

at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:94)

at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:172)

	at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:633)
	at com.zte.rest.TestRest$$EnhancerBySpringCGLIB$$e60e9ed0.createMaskingPlan()
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at com.sun.jersey.spi.container.JavaMethodInvokerFactory$1.invoke(JavaMethodInvokerFactory.java:60)

at com.sun.jersey.server.impl.model.method.dispatch.AbstractResourceMethodDispatchProvider$VoidOutInvoker._dispatch(AbstractResourceMethodDispatchProvider.java:167)

at com.sun.jersey.server.impl.model.method.dispatch.ResourceJavaMethodDispatcher.dispatch(ResourceJavaMethodDispatcher.java:75)

at com.sun.jersey.server.impl.uri.rules.HttpMethodRule.accept(HttpMethodRule.java:302)

at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)

at com.sun.jersey.server.impl.uri.rules.ResourceClassRule.accept(ResourceClassRule.java:108)

at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)

at com.sun.jersey.server.impl.uri.rules.RootResourceClassesRule.accept(RootResourceClassesRule.java:84)

at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1542)

at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1473)

at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1419)

at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1409)

at com.sun.jersey.spi.container.servlet.WebComponent.service(WebComponent.java:409)

at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:558)

at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:733)

Wrong JDK version: major.minor version 52.0 is Java 8, but our local build targets Java 1.7.

The root cause is that the mask-web project is started with JDK 1.7; it must be switched to 1.8.

Command-line parameters:

spark.executorEnv.JAVA_HOME=/usr/java8/jdk

spark.yarn.appMasterEnv.JAVA_HOME=/usr/java8/jdk

spark.yarn.queue=mr

spark.master=spark://xinhuo113:7077,xinhuo111:7077 (needed in standalone mode)
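A quick hedged check that can be dropped into the web app's startup code to confirm which JDK is actually hosting it before any Spark class is loaded:

// SparkLauncher in Spark 2.x is compiled to class-file major version 52 (Java 8),
// so anything below 1.8 here reproduces the UnsupportedClassVersionError:
logger.info("java.version = " + System.getProperty("java.version"));
logger.info("java.home    = " + System.getProperty("java.home"));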

Problem 4

Jun 12, 2018 1:47:53 PM org.apache.spark.launcher.OutputRedirector redirect

INFO: Error: Cluster deploy mode is not compatible with master "local"

Jun 12, 2018 1:47:53 PM org.apache.spark.launcher.OutputRedirector redirect

INFO: Run with --help for usage help or --verbose for debug output

Without the following settings, the launcher defaults to local mode and the submission fails. Set YARN mode first:

.setMaster("yarn")

.setDeployMode("cluster")

After that, a user-permission error appears and the user must be granted queue rights; for now, start the project as the mr user by default (see the sketch after the error below).

Jun 12, 2018 2:01:42 PM org.apache.spark.launcher.OutputRedirector redirect

INFO: Exception in thread "main" org.apache.hadoop.yarn.exceptions.YarnException: Failed to submit application_1528709704391_0001 to YARN : User root cannot submit applications to queue root.root

Log file:

/home/xiehh/mask/mask-1.0.0-zdh8.2.1-SNAPSHOT-web/target/unit-tests.log

spark-submit --master yarn --deploy-mode cluster --executor-cores 2 --executor-memory 4g --conf spark.sql.shuffle.partitions=800 --class anonymous.main.KLMainSpark Ka-Ld-1.0-SNAPSHOT.jar KAn_DoubleNum.xml

spark-submit --master yarn --deploy-mode client --executor-cores 1 --executor-memory 1g --num-executors 2 --conf spark.sql.shuffle.partitions=800 --class anonymous.main.KLMainSpark Ka-Ld-1.0-SNAPSHOT.jar KAn_DoubleNum.xml

Difference between client mode and cluster mode: in client mode the driver runs in the submitting JVM, so it can read local files such as /etc/spark/conf/hive-site.xml; in cluster mode the driver runs inside the YARN ApplicationMaster on an arbitrary node.

Both the jar and the config file are in an HDFS directory. Note: in cluster mode the job may fail to read /etc/spark/conf/hive-site.xml, so the file must be specified manually:

spark-submit --master yarn --deploy-mode cluster --executor-cores 2 --executor-memory 4g --conf spark.sql.shuffle.partitions=800 --class anonymous.main.KLMainSpark hdfs://nameservice/job/KLTAnonymous-1.0-SNAPSHOT.jar hdfs://nameservice/job/KAn_DoubleNum.xml

This command fails to pick up the hive-site file, so Spark falls back to an embedded Derby metastore and cannot see the Hive databases:

2018-06-13 09:43:42,785 INFO org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY

2018-06-13 09:43:42,789 INFO org.apache.hadoop.hive.metastore.ObjectStore: Initialized ObjectStore

2018-06-13 09:43:43,092 WARN org.apache.hadoop.hive.metastore.ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 1.2.0

2018-06-13 09:43:43,421 WARN org.apache.hadoop.hive.metastore.ObjectStore: Failed to get database default, returning NoSuchObjectException

Specify the hive-site.xml file explicitly with --files:

spark-submit --master yarn --deploy-mode cluster --files /etc/spark/conf/hive-site.xml --executor-cores 2 --executor-memory 4g --conf spark.sql.shuffle.partitions=800 --class anonymous.main.KLMainSpark hdfs://nameservice/job/KLTAnonymous-1.0-SNAPSHOT.jar hdfs://nameservice/job/KAn_DoubleNum.xml

Error when running in yarn-cluster mode: execution is distributed, so the file KAn_DoubleNum.xml must be on HDFS (or shipped with --files) so every machine can see it; reading it as a plain local file fails, as the log below shows (a fix sketch follows the log).

mr@dap132-183:/home/xiehh> yarn logs -applicationId application_1528709704391_0017

2018-06-13 09:19:40,881 INFO org.apache.hadoop.yarn.client.ConfiguredRMFailoverProxyProvider: Failing over to rm2

Container: container_1528709704391_0017_02_000001 on dap134-183_8041

LogType:stderr

Log Upload Time: Wed Jun 13 09:17:28 +0800 2018

LogLength:9816

Log Contents:

SLF4J: Class path contains multiple SLF4J bindings.

SLF4J: Found binding in [jar:file:/data2/zdh/yarn/local/usercache/mr/filecache/45/__spark_libs__558832802002154562.zip/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]

SLF4J: Found binding in [jar:file:/opt/ZDH/parcels/lib/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]

SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.

SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]

2018-06-13 09:17:22,186 INFO org.apache.spark.util.SignalUtils: Registered signal handler for TERM

2018-06-13 09:17:22,188 INFO org.apache.spark.util.SignalUtils: Registered signal handler for HUP

2018-06-13 09:17:22,188 INFO org.apache.spark.util.SignalUtils: Registered signal handler for INT

2018-06-13 09:17:23,835 INFO org.apache.spark.deploy.yarn.ApplicationMaster: Preparing Local resources

2018-06-13 09:17:24,691 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable

2018-06-13 09:17:24,699 WARN org.apache.hadoop.hdfs.shortcircuit.DomainSocketFactory: The short-circuit local reads feature cannot be used because libhadoop cannot be loaded.

2018-06-13 09:17:25,162 INFO org.apache.spark.deploy.yarn.ApplicationMaster: ApplicationAttemptId: appattempt_1528709704391_0017_000002

2018-06-13 09:17:25,173 INFO org.apache.spark.SecurityManager: Changing view acls to: mr

2018-06-13 09:17:25,173 INFO org.apache.spark.SecurityManager: Changing modify acls to: mr

2018-06-13 09:17:25,297 INFO org.apache.spark.SecurityManager: Changing view acls groups to:

2018-06-13 09:17:25,298 INFO org.apache.spark.SecurityManager: Changing modify acls groups to:

2018-06-13 09:17:25,298 INFO org.apache.spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(mr); groups with view permissions: Set(); users with modify permissions: Set(mr); groups with modify permissions: Set()

2018-06-13 09:17:25,430 INFO org.apache.spark.deploy.yarn.ApplicationMaster: Starting the user application in a separate Thread

2018-06-13 09:17:25,439 INFO org.apache.spark.deploy.yarn.ApplicationMaster: Waiting for spark context initialization...

2018-06-13 09:17:25,449 ERROR config.DOMParser: /data2/zdh/yarn/local/usercache/mr/appcache/application_1528709704391_0017/container_1528709704391_0017_02_000001/KAn_DoubleNum.xml (No such file or directory)

java.io.FileNotFoundException: /data2/zdh/yarn/local/usercache/mr/appcache/application_1528709704391_0017/container_1528709704391_0017_02_000001/KAn_DoubleNum.xml (No such file or directory)

at java.io.FileInputStream.open0(Native Method)

at java.io.FileInputStream.open(FileInputStream.java:195)

	at java.io.FileInputStream.<init>(FileInputStream.java:138)

	at java.io.FileInputStream.<init>(FileInputStream.java:93)

at sun.net.www.protocol.file.FileURLConnection.connect(FileURLConnection.java:90)

at sun.net.www.protocol.file.FileURLConnection.getInputStream(FileURLConnection.java:188)

at org.apache.xerces.impl.XMLEntityManager.setupCurrentEntity(Unknown Source)

at org.apache.xerces.impl.XMLVersionDetector.determineDocVersion(Unknown Source)

at org.apache.xerces.parsers.XML11Configuration.parse(Unknown Source)

at org.apache.xerces.parsers.XML11Configuration.parse(Unknown Source)

at org.apache.xerces.parsers.XMLParser.parse(Unknown Source)

at org.apache.xerces.parsers.DOMParser.parse(Unknown Source)

at org.apache.xerces.jaxp.DocumentBuilderImpl.parse(Unknown Source)

at javax.xml.parsers.DocumentBuilder.parse(DocumentBuilder.java:205)

at config.DOMParser.parse(DOMParser.java:29)

at config.DOMParser.parseConfig(DOMParser.java:46)

at anonymous.main.KLMainSpark.function(KLMainSpark.java:65)

at anonymous.main.KLMainSpark.init(KLMainSpark.java:57)

at anonymous.main.KLMainSpark.main(KLMainSpark.java:53)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:498)

	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:637)
2018-06-13 09:17:25,485 ERROR org.apache.spark.deploy.yarn.ApplicationMaster: User class threw exception: java.lang.Exception: /data2/zdh/yarn/local/usercache/mr/appcache/application_1528709704391_0017/container_1528709704391_0017_02_000001/KAn_DoubleNum.xml (No such file or directory)
java.lang.Exception: /data2/zdh/yarn/local/usercache/mr/appcache/application_1528709704391_0017/container_1528709704391_0017_02_000001/KAn_DoubleNum.xml (No such file or directory)
	at config.DOMParser.parse(DOMParser.java:32)
	at config.DOMParser.parseConfig(DOMParser.java:46)
	at anonymous.main.KLMainSpark.function(KLMainSpark.java:65)
	at anonymous.main.KLMainSpark.init(KLMainSpark.java:57)
	at anonymous.main.KLMainSpark.main(KLMainSpark.java:53)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:637)
2018-06-13 09:17:25,488 INFO org.apache.spark.deploy.yarn.ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: java.lang.Exception: /data2/zdh/yarn/local/usercache/mr/appcache/application_1528709704391_0017/container_1528709704391_0017_02_000001/KAn_DoubleNum.xml (No such file or directory))
2018-06-13 09:17:25,503 ERROR org.apache.spark.deploy.yarn.ApplicationMaster: Uncaught exception:
org.apache.spark.SparkException: Exception thrown in awaitResult:
	at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:205)

at org.apache.spark.deploy.yarn.ApplicationMaster.runDriver(ApplicationMaster.scala:403)

at org.apache.spark.deploy.yarn.ApplicationMaster.run(ApplicationMaster.scala:256)

	at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$main$1.apply$mcV$sp(ApplicationMaster.scala:766)
	at org.apache.spark.deploy.SparkHadoopUtil$$anon$2.run(SparkHadoopUtil.scala:67)
	at org.apache.spark.deploy.SparkHadoopUtil$$anon$2.run(SparkHadoopUtil.scala:66)

at java.security.AccessController.doPrivileged(Native Method)

at javax.security.auth.Subject.doAs(Subject.java:422)

at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)

at org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:66)

	at org.apache.spark.deploy.yarn.ApplicationMaster$.main(ApplicationMaster.scala:764)
	at org.apache.spark.deploy.yarn.ApplicationMaster.main(ApplicationMaster.scala)
Caused by: java.lang.Exception: /data2/zdh/yarn/local/usercache/mr/appcache/application_1528709704391_0017/container_1528709704391_0017_02_000001/KAn_DoubleNum.xml (No such file or directory)
	at config.DOMParser.parse(DOMParser.java:32)
	at config.DOMParser.parseConfig(DOMParser.java:46)
	at anonymous.main.KLMainSpark.function(KLMainSpark.java:65)
	at anonymous.main.KLMainSpark.init(KLMainSpark.java:57)
	at anonymous.main.KLMainSpark.main(KLMainSpark.java:53)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:637)

2018-06-13 09:17:25,512 INFO org.apache.spark.deploy.yarn.ApplicationMaster: Unregistering ApplicationMaster with FAILED (diag message: User class threw exception: java.lang.Exception: /data2/zdh/yarn/local/usercache/mr/appcache/application_1528709704391_0017/container_1528709704391_0017_02_000001/KAn_DoubleNum.xml (No such file or directory))

2018-06-13 09:17:25,513 INFO org.apache.spark.deploy.yarn.ApplicationMaster: Deleting staging directory hdfs://nameservice/user/mr/.sparkStaging/application_1528709704391_0017

2018-06-13 09:17:25,652 ERROR org.apache.spark.util.Utils: Uncaught exception in thread Thread-3

java.lang.NullPointerException

	at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$run$1.apply$mcV$sp(ApplicationMaster.scala:237)

at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:216)

at org.apache.spark.util.SparkSh

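A hedged sketch of the in-application fix referenced above: open the XML through the Hadoop FileSystem API instead of java.io.FileInputStream, so the argument can stay an hdfs:// URI and cluster mode works on any node (the helper class and method are illustrative; DOMParser is the article's own parser):

import java.io.InputStream;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.w3c.dom.Document;

public class HdfsXmlConfig { // hypothetical helper, for illustration
    public static Document load(String uri) throws Exception {
        Path cfg = new Path(uri); // e.g. hdfs://nameservice/job/KAn_DoubleNum.xml
        try (InputStream in = cfg.getFileSystem(new Configuration()).open(cfg)) {
            // Parse straight from the HDFS stream rather than a local java.io.File:
            return DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in);
        }
    }
}

Alternatively, ship the file with --files hdfs://nameservice/job/KAn_DoubleNum.xml so YARN localizes it into each container's working directory.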

Problem 5:

2018-06-08 14:51:07,506 WARN org.apache.spark.scheduler.TaskSchedulerImpl: Initial job has not accepted any resources; check your cluster UI to ensure that workers are registered and have sufficient resources

Cause: the resources the job requests are not available in the cluster; check the cluster UI to confirm workers are registered and the queue has capacity. A tuning sketch follows.
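A hedged sketch: shrink the per-executor request (or free capacity in the queue) so the scheduler can actually satisfy it; the values are illustrative, and the helper class is hypothetical:

import org.apache.spark.launcher.SparkLauncher;

public class ModestResources { // hypothetical helper, for illustration
    static SparkLauncher configure(SparkLauncher launcher) {
        return launcher
                .setConf(SparkLauncher.EXECUTOR_MEMORY, "1g")   // down from 4g
                .setConf(SparkLauncher.EXECUTOR_CORES, "1")     // down from 2
                .setConf("spark.executor.instances", "2");      // cap the executor count
    }
}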
