添加配置文件 phoenixConnectMode.scala :

package statistics.benefits

import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.phoenix.spark._

/**
 * Helper for reading Apache Phoenix tables into Spark DataFrames.
 *
 * Centralizes the HBase/Phoenix connection settings so every job that needs
 * a Phoenix-backed DataFrame goes through one place.
 */
object phoenixConnectMode {

  // ZooKeeper quorum of the HBase cluster backing Phoenix.
  private val zookeeper = "node3:2181"

  /**
   * Loads the given Phoenix table as a DataFrame restricted to the given columns.
   *
   * @param sqlContext active Spark SQLContext
   * @param tableName  Phoenix table name (schema-qualified if namespaced)
   * @param columns    column names to project from the table
   * @return a DataFrame backed by the Phoenix table
   */
  def getMode1(sqlContext: SQLContext, tableName: String, columns: Array[String]): DataFrame = {
    val hadoopConf = new Configuration()
    // Namespace mapping must match the server-side Phoenix configuration,
    // otherwise table resolution fails against a namespaced schema.
    hadoopConf.set("phoenix.schema.isNamespaceMappingEnabled", "true")
    hadoopConf.set("phoenix.schema.mapSystemTablesToNamespace", "true")
    hadoopConf.set("hbase.zookeeper.quorum", zookeeper)
    sqlContext.phoenixTableAsDataFrame(tableName, columns, conf = hadoopConf)
  }
}

Spark SQL源文件 costDay.scala :

package statistics

import common.util.timeUtil
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions.col
import statistics.benefits.phoenixConnectMode

/**
 * Daily job: computes the per-day depreciation cost of each asset.
 *
 * Reads the ASSET_NORMAL table from Phoenix, registers it as a temp table,
 * and evaluates a SQL query that applies straight-line depreciation:
 *   - within the depreciable life: 95% of original value spread over years*365 days
 *   - beyond the depreciable life: 5% of original value spread over 365 days
 * Runs once per day.
 */
object costDay {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("fdsf").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    val tableName = "ASSET_NORMAL"
    // Full projection of the asset table; kept explicit so schema drift in
    // Phoenix is caught here rather than at query time.
    val columns = Array(
      "ID", "ASSET_ID", "ASSET_NAME", "ASSET_FIRST_DEGREE_ID", "ASSET_FIRST_DEGREE_NAME",
      "ASSET_SECOND_DEGREE_ID", "ASSET_SECOND_DEGREE_NAME", "GB_DEGREE_ID", "GB_DEGREE_NAME",
      "ASSET_USE_FIRST_DEGREE_ID", "ASSET_USE_FIRST_DEGREE_NAME", "ASSET_USE_SECOND_DEGREE_ID",
      "ASSET_USE_SECOND_DEGREE_NAME", "MANAGEMENT_TYPE_ID", "MANAGEMENT_TYPE_NAME",
      "ASSET_MODEL", "FACTORY_NUMBER", "ASSET_COUNTRY_ID", "ASSET_COUNTRY_NAME",
      "MANUFACTURER", "SUPPLIER", "SUPPLIER_TEL", "ORIGINAL_VALUE",
      "USE_DEPARTMENT_ID", "USE_DEPARTMENT_NAME", "USER_ID", "USER_NAME",
      "ASSET_LOCATION_OF_PARK_ID", "ASSET_LOCATION_OF_PARK_NAME",
      "ASSET_LOCATION_OF_BUILDING_ID", "ASSET_LOCATION_OF_BUILDING_NAME",
      "ASSET_LOCATION_OF_ROOM_ID", "ASSET_LOCATION_OF_ROOM_NUMBER",
      "PRODUCTION_DATE", "ACCEPTANCE_DATE", "REQUISITION_DATE", "PERFORMANCE_INDEX",
      "ASSET_STATE_ID", "ASSET_STATE_NAME", "INSPECTION_TYPE_ID", "INSPECTION_TYPE_NAME",
      "SEAL_DATE", "SEAL_CAUSE", "COST_ITEM_ID", "COST_ITEM_NAME", "ITEM_COMMENTS",
      "UNSEAL_DATE", "SCRAP_DATE", "PURCHASE_NUMBER", "WARRANTY_PERIOD",
      "DEPRECIABLE_LIVES_ID", "DEPRECIABLE_LIVES_NAME",
      "MEASUREMENT_UNITS_ID", "MEASUREMENT_UNITS_NAME", "ANNEX", "REMARK",
      "ACCOUNTING_TYPE_ID", "ACCOUNTING_TYPE_NAME", "SYSTEM_TYPE_ID", "SYSTEM_TYPE_NAME",
      "ASSET_ID_PARENT", "CLASSIFIED_LEVEL_ID", "CLASSIFIED_LEVEL_NAME",
      "ASSET_PICTURE", "MILITARY_SPECIAL_CODE",
      "CHECK_CYCLE_ID", "CHECK_CYCLE_NAME", "CHECK_DATE", "CHECK_EFFECTIVE_DATE",
      "CHECK_MODE_ID", "CHECK_MODE_NAME", "CHECK_DEPARTMENT_ID", "CHECK_DEPARTMENT_NAME",
      "RENT_STATUS_ID", "RENT_STATUS_NAME", "STORAGE_TIME",
      "UPDATE_USER", "UPDATE_TIME", "IS_ON_PROCESS", "IS_DELETED",
      "FIRST_DEPARTMENT_ID", "FIRST_DEPARTMENT_NAME",
      "SECOND_DEPARTMENT_ID", "SECOND_DEPARTMENT_NAME",
      "CREATE_USER", "CREATE_TIME"
    )

    // Rows without a using department cannot be attributed a cost; drop them up front.
    val df = phoenixConnectMode.getMode1(sqlContext, tableName, columns)
      .filter(col("USE_DEPARTMENT_ID").isNotNull)
    df.registerTempTable("asset_normal")

    // Daily cost while the asset is still within its depreciable life:
    // 95% of the original value spread evenly over (years * 365) days.
    def costingWithin(originalValue: Double, years: Int): Double =
      (originalValue * 0.95) / (years * 365)
    sqlContext.udf.register("costingWithin", costingWithin _)

    // Daily cost once the depreciable life has expired:
    // the residual 5% of the original value spread over one year.
    def costingBeyond(originalValue: Double): Double = originalValue * 0.05 / 365
    sqlContext.udf.register("costingBeyond", costingBeyond _)

    // True while (acceptanceDate + years) is still in the future,
    // i.e. the asset has not yet exhausted its depreciable life.
    def expire(acceptanceDate: String, years: Int): Boolean =
      timeUtil.dateStrAddYears2TimeStamp(acceptanceDate, timeUtil.SECOND_TIME_FORMAT, years) > System.currentTimeMillis()
    sqlContext.udf.register("expire", expire _)

    // NOTE(review): DEPRECIABLE_LIVES_NAME is passed where the UDFs expect an
    // Int/Double — this relies on Spark SQL's implicit cast of the column value;
    // confirm the column always holds a numeric string.
    val costDay = sqlContext.sql("select " +
      "ID" +
      ",USE_DEPARTMENT_ID as FIRST_DEPARTMENT_ID" +
      ",case when expire(ACCEPTANCE_DATE, DEPRECIABLE_LIVES_NAME) then costingWithin(ORIGINAL_VALUE, DEPRECIABLE_LIVES_NAME) else costingBeyond(ORIGINAL_VALUE) end as ACTUAL_COST" +
      ",ORIGINAL_VALUE" +
      ",current_timestamp() as GENERATION_TIME" +
      " from asset_normal")
    costDay.show(false)

    // Write-back to Phoenix, kept for when the result should be persisted:
    //costDay.write
    //  .format("org.apache.phoenix.spark")
    //  .mode("overwrite")
    //  .option("table", "ASSET_FINANCIAL_DETAIL_DAY")
    //  .option("zkUrl", "node3,node4,node5:2181")
    //  .save()

    // Release the SparkContext so the JVM can exit cleanly (was missing).
    sc.stop()
  }
}

返回信息:Process finished with exit code 0,执行成功!

执行结果:

C:\3rd\Java\jdk1.8.0_212\bin\java.exe "-javaagent:C:\3rd\JetBrains\IntelliJ IDEA 2019.1.3\lib\idea_rt.jar=56536:C:\3rd\JetBrains\IntelliJ IDEA 2019.1.3\bin" -Dfile.encoding=UTF-8 -classpath C:\3rd\Java\jdk1.8.0_212\jre\lib\charsets.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\deploy.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\access-bridge-64.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\cldrdata.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\dnsns.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\jaccess.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\jfxrt.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\localedata.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\nashorn.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunec.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunjce_provider.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunmscapi.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunpkcs11.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\zipfs.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\javaws.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jce.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jfr.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jfxswt.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jsse.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\management-agent.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\plugin.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\resources.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\rt.jar;C:\development\statistics\target\classes;C:\development\MavenRepository\org\apache\spark\spark-core_2.10\1.6.0-cdh5.14.2\spark-core_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\avro\avro-mapred\1.7.6-cdh5.14.2\avro-mapred-1.7.6-cdh5.14.2-hadoop2.jar;C:\development\MavenRepository\org\apache\avro\avro-ipc\1.7.6-cdh5.14.2\avro-ipc-1.7.6-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\avro\avro-ipc\1.7.6-cdh5.14.2\avro-ipc-1.7.6-cdh5.14.2-tests.jar;C:\development\MavenRepository\com\twitter\chill_2.10\0.5.0\chill_2.10-0.5.0.jar;C:\development\MavenRepository\com\esotericsoftware\kryo\kryo\2.21\kryo-2.21.jar;C:\development\MavenRepository\com\esotericsoftware\reflectasm\reflectasm\1.
07\reflectasm-1.07-shaded.jar;C:\development\MavenRepository\com\esotericsoftware\minlog\minlog\1.2\minlog-1.2.jar;C:\development\MavenRepository\org\objenesis\objenesis\1.2\objenesis-1.2.jar;C:\development\MavenRepository\com\twitter\chill-java\0.5.0\chill-java-0.5.0.jar;C:\development\MavenRepository\org\apache\xbean\xbean-asm5-shaded\4.4\xbean-asm5-shaded-4.4.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-client\2.6.0-cdh5.14.2\hadoop-client-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-hdfs\2.6.0-cdh5.14.2\hadoop-hdfs-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\xerces\xercesImpl\2.9.1\xercesImpl-2.9.1.jar;C:\development\MavenRepository\xml-apis\xml-apis\1.3.04\xml-apis-1.3.04.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-app\2.6.0-cdh5.14.2\hadoop-mapreduce-client-app-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-common\2.6.0-cdh5.14.2\hadoop-mapreduce-client-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-client\2.6.0-cdh5.14.2\hadoop-yarn-client-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-server-common\2.6.0-cdh5.14.2\hadoop-yarn-server-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-shuffle\2.6.0-cdh5.14.2\hadoop-mapreduce-client-shuffle-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-api\2.6.0-cdh5.14.2\hadoop-yarn-api-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-jobclient\2.6.0-cdh5.14.2\hadoop-mapreduce-client-jobclient-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-aws\2.6.0-cdh5.14.2\hadoop-aws-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\com\amazonaws\aws-java-sdk-bundle\1.11.134\aws-java-sdk-bundle-1.11.134.jar;C:\development\MavenRepository\org\apache\spark\spark-launcher_2.10\1.6.0-cd
h5.14.2\spark-launcher_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\spark\spark-network-common_2.10\1.6.0-cdh5.14.2\spark-network-common_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\fusesource\leveldbjni\leveldbjni-all\1.8\leveldbjni-all-1.8.jar;C:\development\MavenRepository\com\fasterxml\jackson\core\jackson-annotations\2.2.3\jackson-annotations-2.2.3.jar;C:\development\MavenRepository\org\apache\spark\spark-network-shuffle_2.10\1.6.0-cdh5.14.2\spark-network-shuffle_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\spark\spark-unsafe_2.10\1.6.0-cdh5.14.2\spark-unsafe_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\net\java\dev\jets3t\jets3t\0.9.0\jets3t-0.9.0.jar;C:\development\MavenRepository\org\apache\httpcomponents\httpcore\4.1.2\httpcore-4.1.2.jar;C:\development\MavenRepository\com\jamesmurty\utils\java-xmlbuilder\0.4\java-xmlbuilder-0.4.jar;C:\development\MavenRepository\org\apache\curator\curator-recipes\2.7.1\curator-recipes-2.7.1.jar;C:\development\MavenRepository\org\apache\curator\curator-framework\2.7.1\curator-framework-2.7.1.jar;C:\development\MavenRepository\org\apache\zookeeper\zookeeper\3.4.6\zookeeper-3.4.6.jar;C:\development\MavenRepository\org\eclipse\jetty\orbit\javax.servlet\3.0.0.v201112011016\javax.servlet-3.0.0.v201112011016.jar;C:\development\MavenRepository\org\apache\commons\commons-lang3\3.3.2\commons-lang3-3.3.2.jar;C:\development\MavenRepository\org\apache\commons\commons-math3\3.4.1\commons-math3-3.4.1.jar;C:\development\MavenRepository\com\google\code\findbugs\jsr305\1.3.9\jsr305-1.3.9.jar;C:\development\MavenRepository\org\slf4j\slf4j-api\1.7.5\slf4j-api-1.7.5.jar;C:\development\MavenRepository\org\slf4j\jul-to-slf4j\1.7.5\jul-to-slf4j-1.7.5.jar;C:\development\MavenRepository\org\slf4j\jcl-over-slf4j\1.7.5\jcl-over-slf4j-1.7.5.jar;C:\development\MavenRepository\log4j\log4j\1.2.17\log4j-1.2.17.jar;C:\development\MavenRepository\org\slf4j\slf4j-log4j12\1.7.5\slf4j-log4j12
-1.7.5.jar;C:\development\MavenRepository\com\ning\compress-lzf\1.0.3\compress-lzf-1.0.3.jar;C:\development\MavenRepository\org\xerial\snappy\snappy-java\1.0.4.1\snappy-java-1.0.4.1.jar;C:\development\MavenRepository\net\jpountz\lz4\lz4\1.3.0\lz4-1.3.0.jar;C:\development\MavenRepository\org\roaringbitmap\RoaringBitmap\0.5.11\RoaringBitmap-0.5.11.jar;C:\development\MavenRepository\commons-net\commons-net\2.2\commons-net-2.2.jar;C:\development\MavenRepository\org\spark-project\akka\akka-remote_2.10\2.2.3-shaded-protobuf\akka-remote_2.10-2.2.3-shaded-protobuf.jar;C:\development\MavenRepository\org\spark-project\akka\akka-actor_2.10\2.2.3-shaded-protobuf\akka-actor_2.10-2.2.3-shaded-protobuf.jar;C:\development\MavenRepository\com\typesafe\config\1.0.2\config-1.0.2.jar;C:\development\MavenRepository\org\spark-project\protobuf\protobuf-java\2.4.1-shaded\protobuf-java-2.4.1-shaded.jar;C:\development\MavenRepository\org\uncommons\maths\uncommons-maths\1.2.2a\uncommons-maths-1.2.2a.jar;C:\development\MavenRepository\org\spark-project\akka\akka-slf4j_2.10\2.2.3-shaded-protobuf\akka-slf4j_2.10-2.2.3-shaded-protobuf.jar;C:\development\MavenRepository\org\scala-lang\scala-library\2.10.5\scala-library-2.10.5.jar;C:\development\MavenRepository\org\json4s\json4s-jackson_2.10\3.2.10\json4s-jackson_2.10-3.2.10.jar;C:\development\MavenRepository\org\json4s\json4s-core_2.10\3.2.10\json4s-core_2.10-3.2.10.jar;C:\development\MavenRepository\org\json4s\json4s-ast_2.10\3.2.10\json4s-ast_2.10-3.2.10.jar;C:\development\MavenRepository\org\scala-lang\scalap\2.10.0\scalap-2.10.0.jar;C:\development\MavenRepository\org\scala-lang\scala-compiler\2.10.0\scala-compiler-2.10.0.jar;C:\development\MavenRepository\com\sun\jersey\jersey-server\1.9\jersey-server-1.9.jar;C:\development\MavenRepository\asm\asm\3.1\asm-3.1.jar;C:\development\MavenRepository\com\sun\jersey\jersey-core\1.9\jersey-core-1.9.jar;C:\development\MavenRepository\org\apache\mesos\mesos\0.21.1\mesos-0.21.1-shaded-protobuf.jar;C:\deve
lopment\MavenRepository\io\netty\netty-all\4.0.29.Final\netty-all-4.0.29.Final.jar;C:\development\MavenRepository\com\clearspring\analytics\stream\2.7.0\stream-2.7.0.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-core\3.1.2\metrics-core-3.1.2.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-jvm\3.1.2\metrics-jvm-3.1.2.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-json\3.1.2\metrics-json-3.1.2.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-graphite\3.1.2\metrics-graphite-3.1.2.jar;C:\development\MavenRepository\com\fasterxml\jackson\core\jackson-databind\2.2.3\jackson-databind-2.2.3.jar;C:\development\MavenRepository\com\fasterxml\jackson\core\jackson-core\2.2.3\jackson-core-2.2.3.jar;C:\development\MavenRepository\com\fasterxml\jackson\module\jackson-module-scala_2.10\2.2.3\jackson-module-scala_2.10-2.2.3.jar;C:\development\MavenRepository\com\thoughtworks\paranamer\paranamer\2.3\paranamer-2.3.jar;C:\development\MavenRepository\org\apache\ivy\ivy\2.4.0\ivy-2.4.0.jar;C:\development\MavenRepository\oro\oro\2.0.8\oro-2.0.8.jar;C:\development\MavenRepository\org\tachyonproject\tachyon-client\0.8.2\tachyon-client-0.8.2.jar;C:\development\MavenRepository\commons-lang\commons-lang\2.4\commons-lang-2.4.jar;C:\development\MavenRepository\commons-io\commons-io\2.4\commons-io-2.4.jar;C:\development\MavenRepository\org\tachyonproject\tachyon-underfs-hdfs\0.8.2\tachyon-underfs-hdfs-0.8.2.jar;C:\development\MavenRepository\org\tachyonproject\tachyon-underfs-s3\0.8.2\tachyon-underfs-s3-0.8.2.jar;C:\development\MavenRepository\org\tachyonproject\tachyon-underfs-local\0.8.2\tachyon-underfs-local-0.8.2.jar;C:\development\MavenRepository\net\razorvine\pyrolite\4.9\pyrolite-4.9.jar;C:\development\MavenRepository\net\sf\py4j\py4j\0.9\py4j-0.9.jar;C:\development\MavenRepository\com\intel\chimera\chimera\0.9.2\chimera-0.9.2.jar;C:\development\MavenRepository\org\spark-project\spark\unused\1.0.0\unused-1.0.0.jar;C:\deve
lopment\MavenRepository\org\apache\spark\spark-sql_2.10\1.6.0-cdh5.14.2\spark-sql_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\spark\spark-catalyst_2.10\1.6.0-cdh5.14.2\spark-catalyst_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\scala-lang\scala-reflect\2.10.5\scala-reflect-2.10.5.jar;C:\development\MavenRepository\org\codehaus\janino\janino\2.7.8\janino-2.7.8.jar;C:\development\MavenRepository\org\codehaus\janino\commons-compiler\2.7.8\commons-compiler-2.7.8.jar;C:\development\MavenRepository\com\twitter\parquet-column\1.5.0-cdh5.14.2\parquet-column-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-common\1.5.0-cdh5.14.2\parquet-common-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-encoding\1.5.0-cdh5.14.2\parquet-encoding-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-hadoop\1.5.0-cdh5.14.2\parquet-hadoop-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-format\2.1.0-cdh5.14.2\parquet-format-2.1.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-jackson\1.5.0-cdh5.14.2\parquet-jackson-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-spark\1.2.0-cdh5.14.2\hbase-spark-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-common\2.6.0-cdh5.14.2\hadoop-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\xmlenc\xmlenc\0.52\xmlenc-0.52.jar;C:\development\MavenRepository\commons-httpclient\commons-httpclient\3.1\commons-httpclient-3.1.jar;C:\development\MavenRepository\org\mortbay\jetty\jetty\6.1.26.cloudera.4\jetty-6.1.26.cloudera.4.jar;C:\development\MavenRepository\org\mortbay\jetty\jetty-util\6.1.26.cloudera.4\jetty-util-6.1.26.cloudera.4.jar;C:\development\MavenRepository\com\sun\jersey\jersey-json\1.9\jersey-json-1.9.jar;C:\development\MavenRepository\org\codehaus\jettison\jettison\1.1\jettison-1.1.jar;C:\development\MavenRepository\com\sun\xml\bind\jaxb-impl\2.2.3-1\jaxb-
impl-2.2.3-1.jar;C:\development\MavenRepository\tomcat\jasper-compiler\5.5.23\jasper-compiler-5.5.23.jar;C:\development\MavenRepository\tomcat\jasper-runtime\5.5.23\jasper-runtime-5.5.23.jar;C:\development\MavenRepository\commons-el\commons-el\1.0\commons-el-1.0.jar;C:\development\MavenRepository\commons-configuration\commons-configuration\1.6\commons-configuration-1.6.jar;C:\development\MavenRepository\commons-digester\commons-digester\1.8\commons-digester-1.8.jar;C:\development\MavenRepository\commons-beanutils\commons-beanutils\1.7.0\commons-beanutils-1.7.0.jar;C:\development\MavenRepository\commons-beanutils\commons-beanutils-core\1.8.0\commons-beanutils-core-1.8.0.jar;C:\development\MavenRepository\com\google\code\gson\gson\2.2.4\gson-2.2.4.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-auth\2.6.0-cdh5.14.2\hadoop-auth-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\directory\server\apacheds-kerberos-codec\2.0.0-M15\apacheds-kerberos-codec-2.0.0-M15.jar;C:\development\MavenRepository\org\apache\directory\server\apacheds-i18n\2.0.0-M15\apacheds-i18n-2.0.0-M15.jar;C:\development\MavenRepository\org\apache\directory\api\api-asn1-api\1.0.0-M20\api-asn1-api-1.0.0-M20.jar;C:\development\MavenRepository\org\apache\directory\api\api-util\1.0.0-M20\api-util-1.0.0-M20.jar;C:\development\MavenRepository\com\jcraft\jsch\0.1.42\jsch-0.1.42.jar;C:\development\MavenRepository\org\apache\curator\curator-client\2.7.1\curator-client-2.7.1.jar;C:\development\MavenRepository\org\apache\htrace\htrace-core4\4.0.1-incubating\htrace-core4-4.0.1-incubating.jar;C:\development\MavenRepository\org\apache\commons\commons-compress\1.4.1\commons-compress-1.4.1.jar;C:\development\MavenRepository\org\tukaani\xz\1.0\xz-1.0.jar;C:\development\MavenRepository\org\apache\hbase\hbase-client\1.2.0-cdh5.14.2\hbase-client-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\jruby\jcodings\jcodings\1.0.8\jcodings-1.0.8.jar;C:\development\MavenRepository\com\yammer\metrics\m
etrics-core\2.2.0\metrics-core-2.2.0.jar;C:\development\MavenRepository\org\apache\hbase\hbase-protocol\1.2.0-cdh5.14.2\hbase-protocol-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-server\1.2.0-cdh5.14.2\hbase-server-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-procedure\1.2.0-cdh5.14.2\hbase-procedure-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-common\1.2.0-cdh5.14.2\hbase-common-1.2.0-cdh5.14.2-tests.jar;C:\development\MavenRepository\org\apache\hbase\hbase-prefix-tree\1.2.0-cdh5.14.2\hbase-prefix-tree-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\com\github\stephenc\high-scale-lib\high-scale-lib\1.1.1\high-scale-lib-1.1.1.jar;C:\development\MavenRepository\org\apache\commons\commons-math\2.1\commons-math-2.1.jar;C:\development\MavenRepository\org\mortbay\jetty\jetty-sslengine\6.1.26.cloudera.4\jetty-sslengine-6.1.26.cloudera.4.jar;C:\development\MavenRepository\org\mortbay\jetty\jsp-2.1\6.1.14\jsp-2.1-6.1.14.jar;C:\development\MavenRepository\org\mortbay\jetty\jsp-api-2.1\6.1.14\jsp-api-2.1-6.1.14.jar;C:\development\MavenRepository\org\mortbay\jetty\servlet-api-2.5\6.1.14\servlet-api-2.5-6.1.14.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-jaxrs\1.8.8\jackson-jaxrs-1.8.8.jar;C:\development\MavenRepository\org\jamon\jamon-runtime\2.4.1\jamon-runtime-2.4.1.jar;C:\development\MavenRepository\org\hamcrest\hamcrest-core\1.3\hamcrest-core-1.3.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-core\2.6.0-mr1-cdh5.14.2\hadoop-core-2.6.0-mr1-cdh5.14.2.jar;C:\development\MavenRepository\javax\servlet\jsp\jsp-api\2.1\jsp-api-2.1.jar;C:\development\MavenRepository\hsqldb\hsqldb\1.8.0.10\hsqldb-1.8.0.10.jar;C:\development\MavenRepository\org\eclipse\jdt\core\3.1.1\core-3.1.1.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-hdfs\2.6.0-cdh5.14.2\hadoop-hdfs-2.6.0-cdh5.14.2-tests.jar;C:\development\MavenRepository\commons-daemon\commons-daemon\1.0.13\
commons-daemon-1.0.13.jar;C:\development\MavenRepository\com\google\protobuf\protobuf-java\2.5.0\protobuf-java-2.5.0.jar;C:\development\MavenRepository\commons-logging\commons-logging\1.2\commons-logging-1.2.jar;C:\development\MavenRepository\com\github\stephenc\findbugs\findbugs-annotations\1.3.9-1\findbugs-annotations-1.3.9-1.jar;C:\development\MavenRepository\org\apache\phoenix\phoenix-spark\4.14.0-cdh5.14.2\phoenix-spark-4.14.0-cdh5.14.2.jar;C:\development\MavenRepository\com\lmax\disruptor\3.3.8\disruptor-3.3.8.jar;C:\development\MavenRepository\org\apache\phoenix\phoenix-core\4.14.0-cdh5.14.2\phoenix-core-4.14.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\tephra\tephra-api\0.14.0-incubating\tephra-api-0.14.0-incubating.jar;C:\development\MavenRepository\org\apache\tephra\tephra-core\0.14.0-incubating\tephra-core-0.14.0-incubating.jar;C:\development\MavenRepository\com\google\inject\guice\3.0\guice-3.0.jar;C:\development\MavenRepository\javax\inject\javax.inject\1\javax.inject-1.jar;C:\development\MavenRepository\aopalliance\aopalliance\1.0\aopalliance-1.0.jar;C:\development\MavenRepository\com\google\inject\extensions\guice-assistedinject\3.0\guice-assistedinject-3.0.jar;C:\development\MavenRepository\org\apache\thrift\libthrift\0.9.0\libthrift-0.9.0.jar;C:\development\MavenRepository\it\unimi\dsi\fastutil\6.5.6\fastutil-6.5.6.jar;C:\development\MavenRepository\org\apache\twill\twill-common\0.8.0\twill-common-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-core\0.8.0\twill-core-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-api\0.8.0\twill-api-0.8.0.jar;C:\development\MavenRepository\org\ow2\asm\asm-all\5.0.2\asm-all-5.0.2.jar;C:\development\MavenRepository\org\apache\twill\twill-discovery-api\0.8.0\twill-discovery-api-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-discovery-core\0.8.0\twill-discovery-core-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-zookeeper\0.8.0\twill-zookeepe
r-0.8.0.jar;C:\development\MavenRepository\org\apache\tephra\tephra-hbase-compat-1.2-cdh\0.14.0-incubating\tephra-hbase-compat-1.2-cdh-0.14.0-incubating.jar;C:\development\MavenRepository\org\antlr\antlr-runtime\3.5.2\antlr-runtime-3.5.2.jar;C:\development\MavenRepository\jline\jline\2.11\jline-2.11.jar;C:\development\MavenRepository\sqlline\sqlline\1.2.0\sqlline-1.2.0.jar;C:\development\MavenRepository\com\google\guava\guava\13.0.1\guava-13.0.1.jar;C:\development\MavenRepository\joda-time\joda-time\1.6\joda-time-1.6.jar;C:\development\MavenRepository\com\github\stephenc\jcip\jcip-annotations\1.0-1\jcip-annotations-1.0-1.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-core-asl\1.8.8\jackson-core-asl-1.8.8.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-mapper-asl\1.8.8\jackson-mapper-asl-1.8.8.jar;C:\development\MavenRepository\junit\junit\4.12\junit-4.12.jar;C:\development\MavenRepository\org\apache\httpcomponents\httpclient\4.0.1\httpclient-4.0.1.jar;C:\development\MavenRepository\org\iq80\snappy\snappy\0.3\snappy-0.3.jar;C:\development\MavenRepository\org\apache\htrace\htrace-core\3.2.0-incubating\htrace-core-3.2.0-incubating.jar;C:\development\MavenRepository\commons-cli\commons-cli\1.2\commons-cli-1.2.jar;C:\development\MavenRepository\commons-codec\commons-codec\1.7\commons-codec-1.7.jar;C:\development\MavenRepository\commons-collections\commons-collections\3.2.2\commons-collections-3.2.2.jar;C:\development\MavenRepository\org\apache\commons\commons-csv\1.0\commons-csv-1.0.jar;C:\development\MavenRepository\org\apache\hbase\hbase-annotations\1.2.0-cdh5.14.2\hbase-annotations-1.2.0-cdh5.14.2.jar;C:\3rd\Java\jdk1.8.0_212\lib\tools.jar;C:\development\MavenRepository\org\apache\hbase\hbase-common\1.2.0-cdh5.14.2\hbase-common-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-hadoop-compat\1.2.0-cdh5.14.2\hbase-hadoop-compat-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-hadoop2-compa
t\1.2.0-cdh5.14.2\hbase-hadoop2-compat-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-annotations\2.6.0-cdh5.14.2\hadoop-annotations-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\jruby\joni\joni\2.1.2\joni-2.1.2.jar;C:\development\MavenRepository\com\salesforce\i18n\i18n-util\1.0.4\i18n-util-1.0.4.jar;C:\development\MavenRepository\com\ibm\icu\icu4j\60.2\icu4j-60.2.jar;C:\development\MavenRepository\com\ibm\icu\icu4j-localespi\60.2\icu4j-localespi-60.2.jar;C:\development\MavenRepository\com\ibm\icu\icu4j-charset\60.2\icu4j-charset-60.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-core\2.6.0-cdh5.14.2\hadoop-mapreduce-client-core-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-common\2.6.0-cdh5.14.2\hadoop-yarn-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\javax\xml\bind\jaxb-api\2.2.2\jaxb-api-2.2.2.jar;C:\development\MavenRepository\javax\xml\stream\stax-api\1.0-2\stax-api-1.0-2.jar;C:\development\MavenRepository\javax\activation\activation\1.1\activation-1.1.jar;C:\development\MavenRepository\javax\servlet\servlet-api\2.5\servlet-api-2.5.jar;C:\development\MavenRepository\com\sun\jersey\jersey-client\1.9\jersey-client-1.9.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-xc\1.8.8\jackson-xc-1.8.8.jar;C:\development\MavenRepository\com\sun\jersey\contribs\jersey-guice\1.9\jersey-guice-1.9.jar;C:\development\MavenRepository\org\apache\avro\avro\1.7.6-cdh5.14.2\avro-1.7.6-cdh5.14.2.jar;C:\development\MavenRepository\com\google\inject\extensions\guice-servlet\3.0\guice-servlet-3.0.jar;C:\development\MavenRepository\io\netty\netty\3.10.5.Final\netty-3.10.5.Final.jar;C:\development\MavenRepository\mysql\mysql-connector-java\5.1.43\mysql-connector-java-5.1.43.jar statistics.costDay19/09/19 23:38:24 INFO spark.SparkContext: Running Spark version 1.6.0
19/09/19 23:38:25INFO spark.SecurityManager: Changing view acls to: cf_pc19/09/19 23:38:25INFO spark.SecurityManager: Changing modify acls to: cf_pc19/09/19 23:38:25INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(cf_pc); users with modify permissions: Set(cf_pc)19/09/19 23:38:26 INFO util.Utils: Successfully started service 'sparkDriver' on port 56564.19/09/19 23:38:27INFO slf4j.Slf4jLogger: Slf4jLogger started19/09/19 23:38:27INFO Remoting: Starting remoting19/09/19 23:38:27 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriverActorSystem@10.200.74.156:56580]
19/09/19 23:38:27 INFO Remoting: Remoting now listens on addresses: [akka.tcp://sparkDriverActorSystem@10.200.74.156:56580]
19/09/19 23:38:27 INFO util.Utils: Successfully started service 'sparkDriverActorSystem' on port 56580.19/09/19 23:38:27INFO spark.SparkEnv: Registering MapOutputTracker19/09/19 23:38:27INFO spark.SparkEnv: Registering BlockManagerMaster19/09/19 23:38:27 INFO storage.DiskBlockManager: Created local directory at C:\Users\cf_pc\AppData\Local\Temp\blockmgr-76b81e6b-b765-4620-a1c0-5078286a7cfc19/09/19 23:38:27 INFO storage.MemoryStore: MemoryStore started with capacity 478.2MB19/09/19 23:38:27INFO spark.SparkEnv: Registering OutputCommitCoordinator19/09/19 23:38:28 INFO server.Server: jetty-8.y.z-SNAPSHOT19/09/19 23:38:28 INFO server.AbstractConnector: Started SelectChannelConnector@0.0.0.0:4040
19/09/19 23:38:28 INFO util.Utils: Successfully started service 'SparkUI' on port 4040.19/09/19 23:38:28 INFO ui.SparkUI: Started SparkUI at http://10.200.74.156:4040
19/09/19 23:38:28INFO executor.Executor: Starting executor ID driver on host localhost19/09/19 23:38:28 INFO util.Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 56589.19/09/19 23:38:28 INFO netty.NettyBlockTransferService: Server created on 56589
19/09/19 23:38:28INFO storage.BlockManagerMaster: Trying to register BlockManager19/09/19 23:38:28 INFO storage.BlockManagerMasterEndpoint: Registering block manager localhost:56589 with 478.2 MB RAM, BlockManagerId(driver, localhost, 56589)19/09/19 23:38:28INFO storage.BlockManagerMaster: Registered BlockManager19/09/19 23:38:30 INFO storage.MemoryStore: Block broadcast_0 stored as values in memory (estimated size 247.5 KB, free 477.9MB)19/09/19 23:38:30 INFO storage.MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 15.4 KB, free 477.9MB)19/09/19 23:38:30 INFO storage.BlockManagerInfo: Added broadcast_0_piece0 in memory on localhost:56589 (size: 15.4 KB, free: 478.2MB)19/09/19 23:38:30 INFO spark.SparkContext: Created broadcast 0 from newAPIHadoopRDD at PhoenixRDD.scala:49
19/09/19 23:38:30 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as 2181 with keytab /hbase19/09/19 23:38:30 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful loginto secure cluster19/09/19 23:38:30 INFO log.QueryLoggerDisruptor: Starting  QueryLoggerDisruptor for with ringbufferSize=8192, waitStrategy=BlockingWaitStrategy, exceptionHandler=org.apache.phoenix.log.QueryLoggerDefaultExceptionHandler@59edb4f5...19/09/19 23:38:30INFO query.ConnectionQueryServicesImpl: An instance of ConnectionQueryServices was created.19/09/19 23:38:30 INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0x73041b7d connecting to ZooKeeper ensemble=node3:2181
19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09GMT19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:host.name=DESKTOP-0CDQ4PM19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:java.version=1.8.0_21219/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:java.vendor=Oracle Corporation19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:java.home=C:\3rd\Java\jdk1.8.0_212\jre19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:java.class.path=C:\3rd\Java\jdk1.8.0_212\jre\lib\charsets.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\deploy.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\access-bridge-64.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\cldrdata.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\dnsns.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\jaccess.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\jfxrt.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\localedata.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\nashorn.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunec.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunjce_provider.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunmscapi.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\sunpkcs11.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\ext\zipfs.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\javaws.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jce.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jfr.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jfxswt.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\jsse.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\management-agent.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\plugin.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\resources.jar;C:\3rd\Java\jdk1.8.0_212\jre\lib\rt.jar;C:\development\statistics\target\classes;C:\development\MavenRepository\org\apache\spark\spark-core_2.10\1.6.0-cdh5.14.2\spark-core_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\avro\avro-mapred\1.7.6-cdh5.14.2\avro-mapred-1.7.6-cdh5.14.2-hadoop2.jar;C:\development\MavenRepository\org\apache\avro\avro-ipc\1.7.6-cdh5
.14.2\avro-ipc-1.7.6-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\avro\avro-ipc\1.7.6-cdh5.14.2\avro-ipc-1.7.6-cdh5.14.2-tests.jar;C:\development\MavenRepository\com\twitter\chill_2.10\0.5.0\chill_2.10-0.5.0.jar;C:\development\MavenRepository\com\esotericsoftware\kryo\kryo\2.21\kryo-2.21.jar;C:\development\MavenRepository\com\esotericsoftware\reflectasm\reflectasm\1.07\reflectasm-1.07-shaded.jar;C:\development\MavenRepository\com\esotericsoftware\minlog\minlog\1.2\minlog-1.2.jar;C:\development\MavenRepository\org\objenesis\objenesis\1.2\objenesis-1.2.jar;C:\development\MavenRepository\com\twitter\chill-java\0.5.0\chill-java-0.5.0.jar;C:\development\MavenRepository\org\apache\xbean\xbean-asm5-shaded\4.4\xbean-asm5-shaded-4.4.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-client\2.6.0-cdh5.14.2\hadoop-client-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-hdfs\2.6.0-cdh5.14.2\hadoop-hdfs-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\xerces\xercesImpl\2.9.1\xercesImpl-2.9.1.jar;C:\development\MavenRepository\xml-apis\xml-apis\1.3.04\xml-apis-1.3.04.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-app\2.6.0-cdh5.14.2\hadoop-mapreduce-client-app-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-common\2.6.0-cdh5.14.2\hadoop-mapreduce-client-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-client\2.6.0-cdh5.14.2\hadoop-yarn-client-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-server-common\2.6.0-cdh5.14.2\hadoop-yarn-server-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-shuffle\2.6.0-cdh5.14.2\hadoop-mapreduce-client-shuffle-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-api\2.6.0-cdh5.14.2\hadoop-yarn-api-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapredu
ce-client-jobclient\2.6.0-cdh5.14.2\hadoop-mapreduce-client-jobclient-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-aws\2.6.0-cdh5.14.2\hadoop-aws-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\com\amazonaws\aws-java-sdk-bundle\1.11.134\aws-java-sdk-bundle-1.11.134.jar;C:\development\MavenRepository\org\apache\spark\spark-launcher_2.10\1.6.0-cdh5.14.2\spark-launcher_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\spark\spark-network-common_2.10\1.6.0-cdh5.14.2\spark-network-common_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\fusesource\leveldbjni\leveldbjni-all\1.8\leveldbjni-all-1.8.jar;C:\development\MavenRepository\com\fasterxml\jackson\core\jackson-annotations\2.2.3\jackson-annotations-2.2.3.jar;C:\development\MavenRepository\org\apache\spark\spark-network-shuffle_2.10\1.6.0-cdh5.14.2\spark-network-shuffle_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\spark\spark-unsafe_2.10\1.6.0-cdh5.14.2\spark-unsafe_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\net\java\dev\jets3t\jets3t\0.9.0\jets3t-0.9.0.jar;C:\development\MavenRepository\org\apache\httpcomponents\httpcore\4.1.2\httpcore-4.1.2.jar;C:\development\MavenRepository\com\jamesmurty\utils\java-xmlbuilder\0.4\java-xmlbuilder-0.4.jar;C:\development\MavenRepository\org\apache\curator\curator-recipes\2.7.1\curator-recipes-2.7.1.jar;C:\development\MavenRepository\org\apache\curator\curator-framework\2.7.1\curator-framework-2.7.1.jar;C:\development\MavenRepository\org\apache\zookeeper\zookeeper\3.4.6\zookeeper-3.4.6.jar;C:\development\MavenRepository\org\eclipse\jetty\orbit\javax.servlet\3.0.0.v201112011016\javax.servlet-3.0.0.v201112011016.jar;C:\development\MavenRepository\org\apache\commons\commons-lang3\3.3.2\commons-lang3-3.3.2.jar;C:\development\MavenRepository\org\apache\commons\commons-math3\3.4.1\commons-math3-3.4.1.jar;C:\development\MavenRepository\com\google\code\findbugs\jsr305\1.3.9\jsr305-1.3.9.jar;C:\devel
opment\MavenRepository\org\slf4j\slf4j-api\1.7.5\slf4j-api-1.7.5.jar;C:\development\MavenRepository\org\slf4j\jul-to-slf4j\1.7.5\jul-to-slf4j-1.7.5.jar;C:\development\MavenRepository\org\slf4j\jcl-over-slf4j\1.7.5\jcl-over-slf4j-1.7.5.jar;C:\development\MavenRepository\log4j\log4j\1.2.17\log4j-1.2.17.jar;C:\development\MavenRepository\org\slf4j\slf4j-log4j12\1.7.5\slf4j-log4j12-1.7.5.jar;C:\development\MavenRepository\com\ning\compress-lzf\1.0.3\compress-lzf-1.0.3.jar;C:\development\MavenRepository\org\xerial\snappy\snappy-java\1.0.4.1\snappy-java-1.0.4.1.jar;C:\development\MavenRepository\net\jpountz\lz4\lz4\1.3.0\lz4-1.3.0.jar;C:\development\MavenRepository\org\roaringbitmap\RoaringBitmap\0.5.11\RoaringBitmap-0.5.11.jar;C:\development\MavenRepository\commons-net\commons-net\2.2\commons-net-2.2.jar;C:\development\MavenRepository\org\spark-project\akka\akka-remote_2.10\2.2.3-shaded-protobuf\akka-remote_2.10-2.2.3-shaded-protobuf.jar;C:\development\MavenRepository\org\spark-project\akka\akka-actor_2.10\2.2.3-shaded-protobuf\akka-actor_2.10-2.2.3-shaded-protobuf.jar;C:\development\MavenRepository\com\typesafe\config\1.0.2\config-1.0.2.jar;C:\development\MavenRepository\org\spark-project\protobuf\protobuf-java\2.4.1-shaded\protobuf-java-2.4.1-shaded.jar;C:\development\MavenRepository\org\uncommons\maths\uncommons-maths\1.2.2a\uncommons-maths-1.2.2a.jar;C:\development\MavenRepository\org\spark-project\akka\akka-slf4j_2.10\2.2.3-shaded-protobuf\akka-slf4j_2.10-2.2.3-shaded-protobuf.jar;C:\development\MavenRepository\org\scala-lang\scala-library\2.10.5\scala-library-2.10.5.jar;C:\development\MavenRepository\org\json4s\json4s-jackson_2.10\3.2.10\json4s-jackson_2.10-3.2.10.jar;C:\development\MavenRepository\org\json4s\json4s-core_2.10\3.2.10\json4s-core_2.10-3.2.10.jar;C:\development\MavenRepository\org\json4s\json4s-ast_2.10\3.2.10\json4s-ast_2.10-3.2.10.jar;C:\development\MavenRepository\org\scala-lang\scalap\2.10.0\scalap-2.10.0.jar;C:\development\MavenRepository\org\sca
la-lang\scala-compiler\2.10.0\scala-compiler-2.10.0.jar;C:\development\MavenRepository\com\sun\jersey\jersey-server\1.9\jersey-server-1.9.jar;C:\development\MavenRepository\asm\asm\3.1\asm-3.1.jar;C:\development\MavenRepository\com\sun\jersey\jersey-core\1.9\jersey-core-1.9.jar;C:\development\MavenRepository\org\apache\mesos\mesos\0.21.1\mesos-0.21.1-shaded-protobuf.jar;C:\development\MavenRepository\io\netty\netty-all\4.0.29.Final\netty-all-4.0.29.Final.jar;C:\development\MavenRepository\com\clearspring\analytics\stream\2.7.0\stream-2.7.0.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-core\3.1.2\metrics-core-3.1.2.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-jvm\3.1.2\metrics-jvm-3.1.2.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-json\3.1.2\metrics-json-3.1.2.jar;C:\development\MavenRepository\io\dropwizard\metrics\metrics-graphite\3.1.2\metrics-graphite-3.1.2.jar;C:\development\MavenRepository\com\fasterxml\jackson\core\jackson-databind\2.2.3\jackson-databind-2.2.3.jar;C:\development\MavenRepository\com\fasterxml\jackson\core\jackson-core\2.2.3\jackson-core-2.2.3.jar;C:\development\MavenRepository\com\fasterxml\jackson\module\jackson-module-scala_2.10\2.2.3\jackson-module-scala_2.10-2.2.3.jar;C:\development\MavenRepository\com\thoughtworks\paranamer\paranamer\2.3\paranamer-2.3.jar;C:\development\MavenRepository\org\apache\ivy\ivy\2.4.0\ivy-2.4.0.jar;C:\development\MavenRepository\oro\oro\2.0.8\oro-2.0.8.jar;C:\development\MavenRepository\org\tachyonproject\tachyon-client\0.8.2\tachyon-client-0.8.2.jar;C:\development\MavenRepository\commons-lang\commons-lang\2.4\commons-lang-2.4.jar;C:\development\MavenRepository\commons-io\commons-io\2.4\commons-io-2.4.jar;C:\development\MavenRepository\org\tachyonproject\tachyon-underfs-hdfs\0.8.2\tachyon-underfs-hdfs-0.8.2.jar;C:\development\MavenRepository\org\tachyonproject\tachyon-underfs-s3\0.8.2\tachyon-underfs-s3-0.8.2.jar;C:\development\MavenRepository\org\tachyonp
roject\tachyon-underfs-local\0.8.2\tachyon-underfs-local-0.8.2.jar;C:\development\MavenRepository\net\razorvine\pyrolite\4.9\pyrolite-4.9.jar;C:\development\MavenRepository\net\sf\py4j\py4j\0.9\py4j-0.9.jar;C:\development\MavenRepository\com\intel\chimera\chimera\0.9.2\chimera-0.9.2.jar;C:\development\MavenRepository\org\spark-project\spark\unused\1.0.0\unused-1.0.0.jar;C:\development\MavenRepository\org\apache\spark\spark-sql_2.10\1.6.0-cdh5.14.2\spark-sql_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\spark\spark-catalyst_2.10\1.6.0-cdh5.14.2\spark-catalyst_2.10-1.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\scala-lang\scala-reflect\2.10.5\scala-reflect-2.10.5.jar;C:\development\MavenRepository\org\codehaus\janino\janino\2.7.8\janino-2.7.8.jar;C:\development\MavenRepository\org\codehaus\janino\commons-compiler\2.7.8\commons-compiler-2.7.8.jar;C:\development\MavenRepository\com\twitter\parquet-column\1.5.0-cdh5.14.2\parquet-column-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-common\1.5.0-cdh5.14.2\parquet-common-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-encoding\1.5.0-cdh5.14.2\parquet-encoding-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-hadoop\1.5.0-cdh5.14.2\parquet-hadoop-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-format\2.1.0-cdh5.14.2\parquet-format-2.1.0-cdh5.14.2.jar;C:\development\MavenRepository\com\twitter\parquet-jackson\1.5.0-cdh5.14.2\parquet-jackson-1.5.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-spark\1.2.0-cdh5.14.2\hbase-spark-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-common\2.6.0-cdh5.14.2\hadoop-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\xmlenc\xmlenc\0.52\xmlenc-0.52.jar;C:\development\MavenRepository\commons-httpclient\commons-httpclient\3.1\commons-httpclient-3.1.jar;C:\development\MavenRepository\org\mortbay\jetty\jetty\6.1.26.cloude
ra.4\jetty-6.1.26.cloudera.4.jar;C:\development\MavenRepository\org\mortbay\jetty\jetty-util\6.1.26.cloudera.4\jetty-util-6.1.26.cloudera.4.jar;C:\development\MavenRepository\com\sun\jersey\jersey-json\1.9\jersey-json-1.9.jar;C:\development\MavenRepository\org\codehaus\jettison\jettison\1.1\jettison-1.1.jar;C:\development\MavenRepository\com\sun\xml\bind\jaxb-impl\2.2.3-1\jaxb-impl-2.2.3-1.jar;C:\development\MavenRepository\tomcat\jasper-compiler\5.5.23\jasper-compiler-5.5.23.jar;C:\development\MavenRepository\tomcat\jasper-runtime\5.5.23\jasper-runtime-5.5.23.jar;C:\development\MavenRepository\commons-el\commons-el\1.0\commons-el-1.0.jar;C:\development\MavenRepository\commons-configuration\commons-configuration\1.6\commons-configuration-1.6.jar;C:\development\MavenRepository\commons-digester\commons-digester\1.8\commons-digester-1.8.jar;C:\development\MavenRepository\commons-beanutils\commons-beanutils\1.7.0\commons-beanutils-1.7.0.jar;C:\development\MavenRepository\commons-beanutils\commons-beanutils-core\1.8.0\commons-beanutils-core-1.8.0.jar;C:\development\MavenRepository\com\google\code\gson\gson\2.2.4\gson-2.2.4.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-auth\2.6.0-cdh5.14.2\hadoop-auth-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\directory\server\apacheds-kerberos-codec\2.0.0-M15\apacheds-kerberos-codec-2.0.0-M15.jar;C:\development\MavenRepository\org\apache\directory\server\apacheds-i18n\2.0.0-M15\apacheds-i18n-2.0.0-M15.jar;C:\development\MavenRepository\org\apache\directory\api\api-asn1-api\1.0.0-M20\api-asn1-api-1.0.0-M20.jar;C:\development\MavenRepository\org\apache\directory\api\api-util\1.0.0-M20\api-util-1.0.0-M20.jar;C:\development\MavenRepository\com\jcraft\jsch\0.1.42\jsch-0.1.42.jar;C:\development\MavenRepository\org\apache\curator\curator-client\2.7.1\curator-client-2.7.1.jar;C:\development\MavenRepository\org\apache\htrace\htrace-core4\4.0.1-incubating\htrace-core4-4.0.1-incubating.jar;C:\development\MavenReposi
tory\org\apache\commons\commons-compress\1.4.1\commons-compress-1.4.1.jar;C:\development\MavenRepository\org\tukaani\xz\1.0\xz-1.0.jar;C:\development\MavenRepository\org\apache\hbase\hbase-client\1.2.0-cdh5.14.2\hbase-client-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\jruby\jcodings\jcodings\1.0.8\jcodings-1.0.8.jar;C:\development\MavenRepository\com\yammer\metrics\metrics-core\2.2.0\metrics-core-2.2.0.jar;C:\development\MavenRepository\org\apache\hbase\hbase-protocol\1.2.0-cdh5.14.2\hbase-protocol-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-server\1.2.0-cdh5.14.2\hbase-server-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-procedure\1.2.0-cdh5.14.2\hbase-procedure-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-common\1.2.0-cdh5.14.2\hbase-common-1.2.0-cdh5.14.2-tests.jar;C:\development\MavenRepository\org\apache\hbase\hbase-prefix-tree\1.2.0-cdh5.14.2\hbase-prefix-tree-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\com\github\stephenc\high-scale-lib\high-scale-lib\1.1.1\high-scale-lib-1.1.1.jar;C:\development\MavenRepository\org\apache\commons\commons-math\2.1\commons-math-2.1.jar;C:\development\MavenRepository\org\mortbay\jetty\jetty-sslengine\6.1.26.cloudera.4\jetty-sslengine-6.1.26.cloudera.4.jar;C:\development\MavenRepository\org\mortbay\jetty\jsp-2.1\6.1.14\jsp-2.1-6.1.14.jar;C:\development\MavenRepository\org\mortbay\jetty\jsp-api-2.1\6.1.14\jsp-api-2.1-6.1.14.jar;C:\development\MavenRepository\org\mortbay\jetty\servlet-api-2.5\6.1.14\servlet-api-2.5-6.1.14.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-jaxrs\1.8.8\jackson-jaxrs-1.8.8.jar;C:\development\MavenRepository\org\jamon\jamon-runtime\2.4.1\jamon-runtime-2.4.1.jar;C:\development\MavenRepository\org\hamcrest\hamcrest-core\1.3\hamcrest-core-1.3.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-core\2.6.0-mr1-cdh5.14.2\hadoop-core-2.6.0-mr1-cdh5.14.2.jar;C:\development\MavenReposit
ory\javax\servlet\jsp\jsp-api\2.1\jsp-api-2.1.jar;C:\development\MavenRepository\hsqldb\hsqldb\1.8.0.10\hsqldb-1.8.0.10.jar;C:\development\MavenRepository\org\eclipse\jdt\core\3.1.1\core-3.1.1.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-hdfs\2.6.0-cdh5.14.2\hadoop-hdfs-2.6.0-cdh5.14.2-tests.jar;C:\development\MavenRepository\commons-daemon\commons-daemon\1.0.13\commons-daemon-1.0.13.jar;C:\development\MavenRepository\com\google\protobuf\protobuf-java\2.5.0\protobuf-java-2.5.0.jar;C:\development\MavenRepository\commons-logging\commons-logging\1.2\commons-logging-1.2.jar;C:\development\MavenRepository\com\github\stephenc\findbugs\findbugs-annotations\1.3.9-1\findbugs-annotations-1.3.9-1.jar;C:\development\MavenRepository\org\apache\phoenix\phoenix-spark\4.14.0-cdh5.14.2\phoenix-spark-4.14.0-cdh5.14.2.jar;C:\development\MavenRepository\com\lmax\disruptor\3.3.8\disruptor-3.3.8.jar;C:\development\MavenRepository\org\apache\phoenix\phoenix-core\4.14.0-cdh5.14.2\phoenix-core-4.14.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\tephra\tephra-api\0.14.0-incubating\tephra-api-0.14.0-incubating.jar;C:\development\MavenRepository\org\apache\tephra\tephra-core\0.14.0-incubating\tephra-core-0.14.0-incubating.jar;C:\development\MavenRepository\com\google\inject\guice\3.0\guice-3.0.jar;C:\development\MavenRepository\javax\inject\javax.inject\1\javax.inject-1.jar;C:\development\MavenRepository\aopalliance\aopalliance\1.0\aopalliance-1.0.jar;C:\development\MavenRepository\com\google\inject\extensions\guice-assistedinject\3.0\guice-assistedinject-3.0.jar;C:\development\MavenRepository\org\apache\thrift\libthrift\0.9.0\libthrift-0.9.0.jar;C:\development\MavenRepository\it\unimi\dsi\fastutil\6.5.6\fastutil-6.5.6.jar;C:\development\MavenRepository\org\apache\twill\twill-common\0.8.0\twill-common-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-core\0.8.0\twill-core-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-api\0.8.0\twill-api
-0.8.0.jar;C:\development\MavenRepository\org\ow2\asm\asm-all\5.0.2\asm-all-5.0.2.jar;C:\development\MavenRepository\org\apache\twill\twill-discovery-api\0.8.0\twill-discovery-api-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-discovery-core\0.8.0\twill-discovery-core-0.8.0.jar;C:\development\MavenRepository\org\apache\twill\twill-zookeeper\0.8.0\twill-zookeeper-0.8.0.jar;C:\development\MavenRepository\org\apache\tephra\tephra-hbase-compat-1.2-cdh\0.14.0-incubating\tephra-hbase-compat-1.2-cdh-0.14.0-incubating.jar;C:\development\MavenRepository\org\antlr\antlr-runtime\3.5.2\antlr-runtime-3.5.2.jar;C:\development\MavenRepository\jline\jline\2.11\jline-2.11.jar;C:\development\MavenRepository\sqlline\sqlline\1.2.0\sqlline-1.2.0.jar;C:\development\MavenRepository\com\google\guava\guava\13.0.1\guava-13.0.1.jar;C:\development\MavenRepository\joda-time\joda-time\1.6\joda-time-1.6.jar;C:\development\MavenRepository\com\github\stephenc\jcip\jcip-annotations\1.0-1\jcip-annotations-1.0-1.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-core-asl\1.8.8\jackson-core-asl-1.8.8.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-mapper-asl\1.8.8\jackson-mapper-asl-1.8.8.jar;C:\development\MavenRepository\junit\junit\4.12\junit-4.12.jar;C:\development\MavenRepository\org\apache\httpcomponents\httpclient\4.0.1\httpclient-4.0.1.jar;C:\development\MavenRepository\org\iq80\snappy\snappy\0.3\snappy-0.3.jar;C:\development\MavenRepository\org\apache\htrace\htrace-core\3.2.0-incubating\htrace-core-3.2.0-incubating.jar;C:\development\MavenRepository\commons-cli\commons-cli\1.2\commons-cli-1.2.jar;C:\development\MavenRepository\commons-codec\commons-codec\1.7\commons-codec-1.7.jar;C:\development\MavenRepository\commons-collections\commons-collections\3.2.2\commons-collections-3.2.2.jar;C:\development\MavenRepository\org\apache\commons\commons-csv\1.0\commons-csv-1.0.jar;C:\development\MavenRepository\org\apache\hbase\hbase-annotations\1.2.0-cdh5.14.
2\hbase-annotations-1.2.0-cdh5.14.2.jar;C:\3rd\Java\jdk1.8.0_212\lib\tools.jar;C:\development\MavenRepository\org\apache\hbase\hbase-common\1.2.0-cdh5.14.2\hbase-common-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-hadoop-compat\1.2.0-cdh5.14.2\hbase-hadoop-compat-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hbase\hbase-hadoop2-compat\1.2.0-cdh5.14.2\hbase-hadoop2-compat-1.2.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-annotations\2.6.0-cdh5.14.2\hadoop-annotations-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\jruby\joni\joni\2.1.2\joni-2.1.2.jar;C:\development\MavenRepository\com\salesforce\i18n\i18n-util\1.0.4\i18n-util-1.0.4.jar;C:\development\MavenRepository\com\ibm\icu\icu4j\60.2\icu4j-60.2.jar;C:\development\MavenRepository\com\ibm\icu\icu4j-localespi\60.2\icu4j-localespi-60.2.jar;C:\development\MavenRepository\com\ibm\icu\icu4j-charset\60.2\icu4j-charset-60.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-mapreduce-client-core\2.6.0-cdh5.14.2\hadoop-mapreduce-client-core-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\org\apache\hadoop\hadoop-yarn-common\2.6.0-cdh5.14.2\hadoop-yarn-common-2.6.0-cdh5.14.2.jar;C:\development\MavenRepository\javax\xml\bind\jaxb-api\2.2.2\jaxb-api-2.2.2.jar;C:\development\MavenRepository\javax\xml\stream\stax-api\1.0-2\stax-api-1.0-2.jar;C:\development\MavenRepository\javax\activation\activation\1.1\activation-1.1.jar;C:\development\MavenRepository\javax\servlet\servlet-api\2.5\servlet-api-2.5.jar;C:\development\MavenRepository\com\sun\jersey\jersey-client\1.9\jersey-client-1.9.jar;C:\development\MavenRepository\org\codehaus\jackson\jackson-xc\1.8.8\jackson-xc-1.8.8.jar;C:\development\MavenRepository\com\sun\jersey\contribs\jersey-guice\1.9\jersey-guice-1.9.jar;C:\development\MavenRepository\org\apache\avro\avro\1.7.6-cdh5.14.2\avro-1.7.6-cdh5.14.2.jar;C:\development\MavenRepository\com\google\inject\extensions\guice-servlet\3.0\guice
-servlet-3.0.jar;C:\development\MavenRepository\io\netty\netty\3.10.5.Final\netty-3.10.5.Final.jar;C:\development\MavenRepository\mysql\mysql-connector-java\5.1.43\mysql-connector-java-5.1.43.jar;C:\3rd\JetBrains\IntelliJ IDEA 2019.1.3\lib\idea_rt.jar19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:java.library.path=C:\3rd\Java\jdk1.8.0_212\bin;C:\WINDOWS\Sun\Java\bin;C:\WINDOWS\system32;C:\WINDOWS;C:\3rd\Anaconda2;C:\3rd\Anaconda2\Library\mingw-w64\bin;C:\3rd\Anaconda2\Library\usr\bin;C:\3rd\Anaconda2\Library\bin;C:\3rd\Anaconda2\Scripts;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\iCLS\;C:\Program Files\Intel\Intel(R) Management Engine Components\iCLS\;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\3rd\MATLAB\R2016a\runtime\win64;C:\3rd\MATLAB\R2016a\bin;C:\3rd\MATLAB\R2016a\polyspace\bin;C:\WINDOWS\System32\OpenSSH\;C:\Program Files\TortoiseSVN\bin;C:\Users\cf_pc\Documents\Caffe\Release;C:\3rd\scala\scala-2.10.5\bin;C:\3rd\Java\jdk1.8.0_212\bin;C:\development\apache-maven-3.6.1\bin;C:\3rd\mysql-8.0.16-winx64\bin;C:\Program Files\Intel\WiFi\bin\;C:\Program Files\Common Files\Intel\WirelessCommon\;C:\3rd\hadoop-common-2.6.0\bin;C:\Users\cf_pc\AppData\Local\Microsoft\WindowsApps;;C:\Program Files\Microsoft VS Code\bin;.19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:java.io.tmpdir=C:\Users\cf_pc\AppData\Local\Temp\19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:java.compiler=<NA>
19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:os.name=Windows 10
19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:os.arch=amd6419/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:os.version=10.0
19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:user.name=cf_pc19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:user.home=C:\Users\cf_pc19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Client environment:user.dir=C:\development\statistics19/09/19 23:38:30 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=node3:2181 sessionTimeout=90000 watcher=hconnection-0x73041b7d0x0, quorum=node3:2181, baseZNode=/hbase19/09/19 23:38:30 INFO zookeeper.ClientCnxn: Opening socket connection to server node3/10.200.101.133:2181. Will not attempt to authenticate using SASL (unknown error)19/09/19 23:38:30 INFO zookeeper.ClientCnxn: Socket connection established to node3/10.200.101.133:2181, initiating session19/09/19 23:38:30 INFO zookeeper.ClientCnxn: Session establishment complete on server node3/10.200.101.133:2181, sessionid = 0x36ca2ccfed66850, negotiated timeout = 60000
19/09/19 23:38:31 INFO query.ConnectionQueryServicesImpl: HConnection established. Stacktrace for informational purposes: hconnection-0x73041b7d java.lang.Thread.getStackTrace(Thread.java:1559)
org.apache.phoenix.util.LogUtil.getCallerStackTrace(LogUtil.java:55)
org.apache.phoenix.query.ConnectionQueryServicesImpl.openConnection(ConnectionQueryServicesImpl.java:427)
org.apache.phoenix.query.ConnectionQueryServicesImpl.access$400(ConnectionQueryServicesImpl.java:267)
org.apache.phoenix.query.ConnectionQueryServicesImpl$12.call(ConnectionQueryServicesImpl.java:2515)
org.apache.phoenix.query.ConnectionQueryServicesImpl$12.call(ConnectionQueryServicesImpl.java:2491)
org.apache.phoenix.util.PhoenixContextExecutor.call(PhoenixContextExecutor.java:76)
org.apache.phoenix.query.ConnectionQueryServicesImpl.init(ConnectionQueryServicesImpl.java:2491)
org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:255)
org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.createConnection(PhoenixEmbeddedDriver.java:150)
org.apache.phoenix.jdbc.PhoenixDriver.connect(PhoenixDriver.java:221)
java.sql.DriverManager.getConnection(DriverManager.java:664)
java.sql.DriverManager.getConnection(DriverManager.java:208)
org.apache.phoenix.mapreduce.util.ConnectionUtil.getConnection(ConnectionUtil.java:113)
org.apache.phoenix.mapreduce.util.ConnectionUtil.getInputConnection(ConnectionUtil.java:58)
org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.getSelectColumnMetadataList(PhoenixConfigurationUtil.java:354)
org.apache.phoenix.spark.PhoenixRDD.toDataFrame(PhoenixRDD.scala:118)
org.apache.phoenix.spark.SparkSqlContextFunctions.phoenixTableAsDataFrame(SparkSqlContextFunctions.scala:39)
statistics.benefits.phoenixConnectMode$.getMode1(phoenixConnectMode.scala:16)
statistics.costDay$.main(costDay.scala:117)
statistics.costDay.main(costDay.scala)19/09/19 23:38:32INFO Configuration.deprecation: hadoop.native.lib is deprecated. Instead, use io.native.lib.available19/09/19 23:38:33 INFO mapreduce.PhoenixInputFormat: UseSelectColumns=true, selectColumnList.size()=86, selectColumnList=ID,ASSET_ID,ASSET_NAME,ASSET_FIRST_DEGREE_ID,ASSET_FIRST_DEGREE_NAME,ASSET_SECOND_DEGREE_ID,ASSET_SECOND_DEGREE_NAME,GB_DEGREE_ID,GB_DEGREE_NAME,ASSET_USE_FIRST_DEGREE_ID,ASSET_USE_FIRST_DEGREE_NAME,ASSET_USE_SECOND_DEGREE_ID,ASSET_USE_SECOND_DEGREE_NAME,MANAGEMENT_TYPE_ID,MANAGEMENT_TYPE_NAME,ASSET_MODEL,FACTORY_NUMBER,ASSET_COUNTRY_ID,ASSET_COUNTRY_NAME,MANUFACTURER,SUPPLIER,SUPPLIER_TEL,ORIGINAL_VALUE,USE_DEPARTMENT_ID,USE_DEPARTMENT_NAME,USER_ID,USER_NAME,ASSET_LOCATION_OF_PARK_ID,ASSET_LOCATION_OF_PARK_NAME,ASSET_LOCATION_OF_BUILDING_ID,ASSET_LOCATION_OF_BUILDING_NAME,ASSET_LOCATION_OF_ROOM_ID,ASSET_LOCATION_OF_ROOM_NUMBER,PRODUCTION_DATE,ACCEPTANCE_DATE,REQUISITION_DATE,PERFORMANCE_INDEX,ASSET_STATE_ID,ASSET_STATE_NAME,INSPECTION_TYPE_ID,INSPECTION_TYPE_NAME,SEAL_DATE,SEAL_CAUSE,COST_ITEM_ID,COST_ITEM_NAME,ITEM_COMMENTS,UNSEAL_DATE,SCRAP_DATE,PURCHASE_NUMBER,WARRANTY_PERIOD,DEPRECIABLE_LIVES_ID,DEPRECIABLE_LIVES_NAME,MEASUREMENT_UNITS_ID,MEASUREMENT_UNITS_NAME,ANNEX,REMARK,ACCOUNTING_TYPE_ID,ACCOUNTING_TYPE_NAME,SYSTEM_TYPE_ID,SYSTEM_TYPE_NAME,ASSET_ID_PARENT,CLASSIFIED_LEVEL_ID,CLASSIFIED_LEVEL_NAME,ASSET_PICTURE,MILITARY_SPECIAL_CODE,CHECK_CYCLE_ID,CHECK_CYCLE_NAME,CHECK_DATE,CHECK_EFFECTIVE_DATE,CHECK_MODE_ID,CHECK_MODE_NAME,CHECK_DEPARTMENT_ID,CHECK_DEPARTMENT_NAME,RENT_STATUS_ID,RENT_STATUS_NAME,STORAGE_TIME,UPDATE_USER,UPDATE_TIME,IS_ON_PROCESS,IS_DELETED,FIRST_DEPARTMENT_ID,FIRST_DEPARTMENT_NAME,SECOND_DEPARTMENT_ID,SECOND_DEPARTMENT_NAME,CREATE_USER,CREATE_TIME19/09/19 23:38:36 INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. 
Instead, use dfs.bytes-per-checksum19/09/19 23:38:36 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as 2181 with keytab /hbase19/09/19 23:38:36 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful loginto secure cluster19/09/19 23:38:36 INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. Instead, use dfs.bytes-per-checksum19/09/19 23:38:36 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as 2181 with keytab /hbase19/09/19 23:38:36 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful loginto secure cluster19/09/19 23:38:36 INFO mapreduce.PhoenixInputFormat: UseSelectColumns=true, selectColumnList.size()=86, selectColumnList=ID,ASSET_ID,ASSET_NAME,ASSET_FIRST_DEGREE_ID,ASSET_FIRST_DEGREE_NAME,ASSET_SECOND_DEGREE_ID,ASSET_SECOND_DEGREE_NAME,GB_DEGREE_ID,GB_DEGREE_NAME,ASSET_USE_FIRST_DEGREE_ID,ASSET_USE_FIRST_DEGREE_NAME,ASSET_USE_SECOND_DEGREE_ID,ASSET_USE_SECOND_DEGREE_NAME,MANAGEMENT_TYPE_ID,MANAGEMENT_TYPE_NAME,ASSET_MODEL,FACTORY_NUMBER,ASSET_COUNTRY_ID,ASSET_COUNTRY_NAME,MANUFACTURER,SUPPLIER,SUPPLIER_TEL,ORIGINAL_VALUE,USE_DEPARTMENT_ID,USE_DEPARTMENT_NAME,USER_ID,USER_NAME,ASSET_LOCATION_OF_PARK_ID,ASSET_LOCATION_OF_PARK_NAME,ASSET_LOCATION_OF_BUILDING_ID,ASSET_LOCATION_OF_BUILDING_NAME,ASSET_LOCATION_OF_ROOM_ID,ASSET_LOCATION_OF_ROOM_NUMBER,PRODUCTION_DATE,ACCEPTANCE_DATE,REQUISITION_DATE,PERFORMANCE_INDEX,ASSET_STATE_ID,ASSET_STATE_NAME,INSPECTION_TYPE_ID,INSPECTION_TYPE_NAME,SEAL_DATE,SEAL_CAUSE,COST_ITEM_ID,COST_ITEM_NAME,ITEM_COMMENTS,UNSEAL_DATE,SCRAP_DATE,PURCHASE_NUMBER,WARRANTY_PERIOD,DEPRECIABLE_LIVES_ID,DEPRECIABLE_LIVES_NAME,MEASUREMENT_UNITS_ID,MEASUREMENT_UNITS_NAME,ANNEX,REMARK,ACCOUNTING_TYPE_ID,ACCOUNTING_TYPE_NAME,SYSTEM_TYPE_ID,SYSTEM_TYPE_NAME,ASSET_ID_PARENT,CLASSIFIED_LEVEL_ID,CLASSIFIED_LEVEL_NAME,ASSET_PICTURE,MILITARY_SPECIAL_CODE,CHECK_CYCLE_ID,CHECK_CYCLE_NAME,CHECK_DATE,CHECK_EFFECTIVE_DATE,CHECK_MODE_ID,CHECK_MODE_NAME,CHECK_DEPA
RTMENT_ID,CHECK_DEPARTMENT_NAME,RENT_STATUS_ID,RENT_STATUS_NAME,STORAGE_TIME,UPDATE_USER,UPDATE_TIME,IS_ON_PROCESS,IS_DELETED,FIRST_DEPARTMENT_ID,FIRST_DEPARTMENT_NAME,SECOND_DEPARTMENT_ID,SECOND_DEPARTMENT_NAME,CREATE_USER,CREATE_TIME19/09/19 23:38:36 INFO mapreduce.PhoenixInputFormat: Select Statement: SELECT "ID","0"."ASSET_ID","0"."ASSET_NAME","0"."ASSET_FIRST_DEGREE_ID","0"."ASSET_FIRST_DEGREE_NAME","0"."ASSET_SECOND_DEGREE_ID","0"."ASSET_SECOND_DEGREE_NAME","0"."GB_DEGREE_ID","0"."GB_DEGREE_NAME","0"."ASSET_USE_FIRST_DEGREE_ID","0"."ASSET_USE_FIRST_DEGREE_NAME","0"."ASSET_USE_SECOND_DEGREE_ID","0"."ASSET_USE_SECOND_DEGREE_NAME","0"."MANAGEMENT_TYPE_ID","0"."MANAGEMENT_TYPE_NAME","0"."ASSET_MODEL","0"."FACTORY_NUMBER","0"."ASSET_COUNTRY_ID","0"."ASSET_COUNTRY_NAME","0"."MANUFACTURER","0"."SUPPLIER","0"."SUPPLIER_TEL","0"."ORIGINAL_VALUE","0"."USE_DEPARTMENT_ID","0"."USE_DEPARTMENT_NAME","0"."USER_ID","0"."USER_NAME","0"."ASSET_LOCATION_OF_PARK_ID","0"."ASSET_LOCATION_OF_PARK_NAME","0"."ASSET_LOCATION_OF_BUILDING_ID","0"."ASSET_LOCATION_OF_BUILDING_NAME","0"."ASSET_LOCATION_OF_ROOM_ID","0"."ASSET_LOCATION_OF_ROOM_NUMBER","0"."PRODUCTION_DATE","0"."ACCEPTANCE_DATE","0"."REQUISITION_DATE","0"."PERFORMANCE_INDEX","0"."ASSET_STATE_ID","0"."ASSET_STATE_NAME","0"."INSPECTION_TYPE_ID","0"."INSPECTION_TYPE_NAME","0"."SEAL_DATE","0"."SEAL_CAUSE","0"."COST_ITEM_ID","0"."COST_ITEM_NAME","0"."ITEM_COMMENTS","0"."UNSEAL_DATE","0"."SCRAP_DATE","0"."PURCHASE_NUMBER","0"."WARRANTY_PERIOD","0"."DEPRECIABLE_LIVES_ID","0"."DEPRECIABLE_LIVES_NAME","0"."MEASUREMENT_UNITS_ID","0"."MEASUREMENT_UNITS_NAME","0"."ANNEX","0"."REMARK","0"."ACCOUNTING_TYPE_ID","0"."ACCOUNTING_TYPE_NAME","0"."SYSTEM_TYPE_ID","0"."SYSTEM_TYPE_NAME","0"."ASSET_ID_PARENT","0"."CLASSIFIED_LEVEL_ID","0"."CLASSIFIED_LEVEL_NAME","0"."ASSET_PICTURE","0"."MILITARY_SPECIAL_CODE","0"."CHECK_CYCLE_ID","0"."CHECK_CYCLE_NAME","0"."CHECK_DATE","0"."CHECK_EFFECTIVE_DATE","0"."CHECK_MODE_ID","0"."CHECK_MODE_NAME","0"."CHECK_
DEPARTMENT_ID","0"."CHECK_DEPARTMENT_NAME","0"."RENT_STATUS_ID","0"."RENT_STATUS_NAME","0"."STORAGE_TIME","0"."UPDATE_USER","0"."UPDATE_TIME","0"."IS_ON_PROCESS","0"."IS_DELETED","0"."FIRST_DEPARTMENT_ID","0"."FIRST_DEPARTMENT_NAME","0"."SECOND_DEPARTMENT_ID","0"."SECOND_DEPARTMENT_NAME","0"."CREATE_USER","0"."CREATE_TIME"FROM ASSET_NORMAL19/09/19 23:38:36 INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0x5f8da82 connecting to ZooKeeper ensemble=node3:2181
19/09/19 23:38:36 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=node3:2181 sessionTimeout=90000 watcher=hconnection-0x5f8da820x0, quorum=node3:2181, baseZNode=/hbase19/09/19 23:38:36 INFO zookeeper.ClientCnxn: Opening socket connection to server node3/10.200.101.133:2181. Will not attempt to authenticate using SASL (unknown error)19/09/19 23:38:36 INFO zookeeper.ClientCnxn: Socket connection established to node3/10.200.101.133:2181, initiating session19/09/19 23:38:36 INFO zookeeper.ClientCnxn: Session establishment complete on server node3/10.200.101.133:2181, sessionid = 0x36ca2ccfed66851, negotiated timeout = 60000
19/09/19 23:38:36 INFO util.RegionSizeCalculator: Calculating region sizes for table "IDX_ASSET_NORMAL".19/09/19 23:38:37INFO client.ConnectionManager$HConnectionImplementation: Closing master protocol: MasterService19/09/19 23:38:37 INFO client.ConnectionManager$HConnectionImplementation: Closing zookeeper sessionid=0x36ca2ccfed66851
19/09/19 23:38:37 INFO zookeeper.ZooKeeper: Session: 0x36ca2ccfed66851closed19/09/19 23:38:37INFO zookeeper.ClientCnxn: EventThread shut down19/09/19 23:38:37 INFO spark.SparkContext: Starting job: show at costDay.scala:142
19/09/19 23:38:37 INFO scheduler.DAGScheduler: Got job 0 (show at costDay.scala:142) with 1output partitions19/09/19 23:38:37 INFO scheduler.DAGScheduler: Final stage: ResultStage 0 (show at costDay.scala:142)19/09/19 23:38:37INFO scheduler.DAGScheduler: Parents of final stage: List()19/09/19 23:38:37INFO scheduler.DAGScheduler: Missing parents: List()19/09/19 23:38:37 INFO scheduler.DAGScheduler: Submitting ResultStage 0 (MapPartitionsRDD[7] at show at costDay.scala:142), whichhas no missing parents19/09/19 23:38:37 INFO storage.MemoryStore: Block broadcast_1 stored as values in memory (estimated size 23.6 KB, free 477.9MB)19/09/19 23:38:37 INFO storage.MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 9.0 KB, free 477.9MB)19/09/19 23:38:37 INFO storage.BlockManagerInfo: Added broadcast_1_piece0 in memory on localhost:56589 (size: 9.0 KB, free: 478.1MB)19/09/19 23:38:37 INFO spark.SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1004
19/09/19 23:38:37 INFO scheduler.DAGScheduler: Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[7] at show at costDay.scala:142) (first 15 tasks are for partitions Vector(0))19/09/19 23:38:37 INFO scheduler.TaskSchedulerImpl: Adding task set 0.0 with 1tasks19/09/19 23:38:37 INFO scheduler.TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, executor driver, partition 0, ANY, 2549bytes)19/09/19 23:38:37 INFO executor.Executor: Running task 0.0 in stage 0.0 (TID 0)19/09/19 23:38:37 INFO rdd.NewHadoopRDD: Input split: org.apache.phoenix.mapreduce.PhoenixInputSplit@20b48819/09/19 23:38:37 INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. Instead, use dfs.bytes-per-checksum19/09/19 23:38:37 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as 2181 with keytab /hbase19/09/19 23:38:37 INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful loginto secure cluster19/09/19 23:38:37 INFO codegen.GeneratePredicate: Code generated in 219.2327ms19/09/19 23:38:37 INFO codegen.GenerateUnsafeProjection: Code generated in 67.2901ms19/09/19 23:38:37 INFO codegen.GenerateSafeProjection: Code generated in 12.9857ms19/09/19 23:38:38 INFO executor.Executor: Finished task 0.0 in stage 0.0 (TID 0). 5544bytes result sent to driver19/09/19 23:38:38 INFO scheduler.TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 968 ms on localhost (executor driver) (1/1)19/09/19 23:38:38 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool19/09/19 23:38:38 INFO scheduler.DAGScheduler: ResultStage 0 (show at costDay.scala:142) finished in 1.000s19/09/19 23:38:38 INFO scheduler.DAGScheduler: Job 0 finished: show at costDay.scala:142, took 1.145145s+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
|ID                              |FIRST_DEPARTMENT_ID|ACTUAL_COST            |ORIGINAL_VALUE|GENERATION_TIME        |
+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
|0001d3d568924f89b073ab7fd10b67f7|1149569976173203456|-40.1654794520548      |-123456.0     |2019-09-19 23:38:36.197|
|0004b28d29124bcc954586b481078bc4|1149569393206890501|526.0068664383562      |1616779.0     |2019-09-19 23:38:36.197|
|0004c64dddda4c289dfc4a9ffb8f14d8|1149569393206890498|443.4765924657534      |1363107.0     |2019-09-19 23:38:36.197|
|0004f257d4b04337bd7a7f46e00e3c72|1149569393206890504|889.779890410959       |8546570.0     |2019-09-19 23:38:36.197|
|00060d2e73314c3a925b0f4408ff8062|1149569393202696199|1391.364794520548      |2672885.0     |2019-09-19 23:38:36.197|
|00064088a53247f8b2435586aad51fb1|1149569976173203457|499.9789041095891      |3649846.0     |2019-09-19 23:38:36.197|
|0008cf0ea69f4c46b1853af91da12f43|1149569393202696200|483.5152054794521      |3529661.0     |2019-09-19 23:38:36.197|
|000aa24e859e4df29f4c14c1f89c5222|1149569393206890503|1923.8509863013699     |3695819.0     |2019-09-19 23:38:36.197|
|000b893786444874b2be42f13093ac30|1149569393206890501|898.7884931506851      |6561156.0     |2019-09-19 23:38:36.197|
|000bce624409453bbc2e29e688b33939|1149569976173203461|2405.1813698630135     |7392768.0     |2019-09-19 23:38:36.197|
|000c1624c43147859b4a8cc210a9bc3d|1149569976173203462|641.9117465753424      |1973034.0     |2019-09-19 23:38:36.197|
|000cd7f91a8b420fa6af8038cd8d2d38|1149569393206890502|803.4142191780821      |3086802.0     |2019-09-19 23:38:36.197|
|000f9ce45af141d581f6be97f925efea|1149569393202696198|-2.6027397260273974E-13|-1.0E-9       |2019-09-19 23:38:36.197|
|0010d45656fe44bbabbb07554d57938e|1149564326588321792|3572.157726027397      |6862303.0     |2019-09-19 23:38:36.197|
|001139ff8a604c3db5c5bb03dfc0b25e|1149564326588321792|1151.3453424657534     |4423590.0     |2019-09-19 23:38:36.197|
|00138f888e9e47eeb713fdfa946ad958|1149569393202696196|93.60389041095891      |899090.0      |2019-09-19 23:38:36.197|
|0013f431b4444fd6a5e9c99e9f96402b|1149569393206890498|350.0574315068493      |1075966.0     |2019-09-19 23:38:36.197|
|00143021a36246dd8ecb07dbe6ece34d|1149569393202696201|632.7836               |6078053.0     |2019-09-19 23:38:36.197|
|00178d016bc4459796a74b9dbf8932af|1149569393202696197|1904.5558356164383     |7317504.0     |2019-09-19 23:38:36.197|
|001e56309b41437385b6b9165b224c32|1149569976173203463|758.8970630136986      |7289406.0     |2019-09-19 23:38:36.197|
+--------------------------------+-------------------+-----------------------+--------------+-----------------------+only showing top20rows19/09/19 23:38:38INFO spark.SparkContext: Invoking stop() from shutdown hook19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/static/sql,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL/execution/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL/execution,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/metrics/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}19/09/19 23:38:38 
INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}19/09/19 23:38:38 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}19/09/19 23:38:38 INFO ui.SparkUI: Stopped Spark web UI at http://10.200.74.156:4040
19/09/19 23:38:38 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
19/09/19 23:38:38 INFO storage.MemoryStore: MemoryStore cleared
19/09/19 23:38:38 INFO storage.BlockManager: BlockManager stopped
19/09/19 23:38:38 INFO storage.BlockManagerMaster: BlockManagerMaster stopped
19/09/19 23:38:38 INFO scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
19/09/19 23:38:38 INFO spark.SparkContext: Successfully stopped SparkContext
19/09/19 23:38:38 INFO util.ShutdownHookManager: Shutdown hook called
19/09/19 23:38:38 INFO util.ShutdownHookManager: Deleting directory C:\Users\cf_pc\AppData\Local\Temp\spark-0ab01cf8-9dc3-4ad5-a435-d84098223ee3

Process finished with exit code 0

从中得到Spark SQL计算的结果:

+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
|ID                              |FIRST_DEPARTMENT_ID|ACTUAL_COST            |ORIGINAL_VALUE|GENERATION_TIME        |
+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
|0001d3d568924f89b073ab7fd10b67f7|1149569976173203456|-40.1654794520548      |-123456.0     |2019-09-19 23:38:36.197|
|0004b28d29124bcc954586b481078bc4|1149569393206890501|526.0068664383562      |1616779.0     |2019-09-19 23:38:36.197|
|0004c64dddda4c289dfc4a9ffb8f14d8|1149569393206890498|443.4765924657534      |1363107.0     |2019-09-19 23:38:36.197|
|0004f257d4b04337bd7a7f46e00e3c72|1149569393206890504|889.779890410959       |8546570.0     |2019-09-19 23:38:36.197|
|00060d2e73314c3a925b0f4408ff8062|1149569393202696199|1391.364794520548      |2672885.0     |2019-09-19 23:38:36.197|
|00064088a53247f8b2435586aad51fb1|1149569976173203457|499.9789041095891      |3649846.0     |2019-09-19 23:38:36.197|
|0008cf0ea69f4c46b1853af91da12f43|1149569393202696200|483.5152054794521      |3529661.0     |2019-09-19 23:38:36.197|
|000aa24e859e4df29f4c14c1f89c5222|1149569393206890503|1923.8509863013699     |3695819.0     |2019-09-19 23:38:36.197|
|000b893786444874b2be42f13093ac30|1149569393206890501|898.7884931506851      |6561156.0     |2019-09-19 23:38:36.197|
|000bce624409453bbc2e29e688b33939|1149569976173203461|2405.1813698630135     |7392768.0     |2019-09-19 23:38:36.197|
|000c1624c43147859b4a8cc210a9bc3d|1149569976173203462|641.9117465753424      |1973034.0     |2019-09-19 23:38:36.197|
|000cd7f91a8b420fa6af8038cd8d2d38|1149569393206890502|803.4142191780821      |3086802.0     |2019-09-19 23:38:36.197|
|000f9ce45af141d581f6be97f925efea|1149569393202696198|-2.6027397260273974E-13|-1.0E-9       |2019-09-19 23:38:36.197|
|0010d45656fe44bbabbb07554d57938e|1149564326588321792|3572.157726027397      |6862303.0     |2019-09-19 23:38:36.197|
|001139ff8a604c3db5c5bb03dfc0b25e|1149564326588321792|1151.3453424657534     |4423590.0     |2019-09-19 23:38:36.197|
|00138f888e9e47eeb713fdfa946ad958|1149569393202696196|93.60389041095891      |899090.0      |2019-09-19 23:38:36.197|
|0013f431b4444fd6a5e9c99e9f96402b|1149569393206890498|350.0574315068493      |1075966.0     |2019-09-19 23:38:36.197|
|00143021a36246dd8ecb07dbe6ece34d|1149569393202696201|632.7836               |6078053.0     |2019-09-19 23:38:36.197|
|00178d016bc4459796a74b9dbf8932af|1149569393202696197|1904.5558356164383     |7317504.0     |2019-09-19 23:38:36.197|
|001e56309b41437385b6b9165b224c32|1149569976173203463|758.8970630136986      |7289406.0     |2019-09-19 23:38:36.197|
+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
only showing top 20 rows

转载于:https://www.cnblogs.com/ratels/p/11553400.html

Spark教程——(10)Spark SQL读取Phoenix数据本地执行计算相关推荐

  1. NC65根据sql读取缓存数据

    NC65根据sql读取缓存数据,相关类 //获取已经读出的参照数据 String sql = "select eps_code,eps_name,pk_eps,pk_org,pk_paren ...

  2. 通过Spark listener实现Direct模式读取Kafaka数据

    参考文章: http://coolplayer.net/2016/11/30/spark-streaming-从kafka-拉数据如何保证数据不丢失/ https://github.com/jacks ...

  3. 【极简spark教程】spark聚合函数

    聚合函数分为两类,一种是spark内置的常用聚合函数,一种是用户自定义聚合函数 UDAF 不带类型的UDAF[较常用] 继承UserDefinedAggregateFunction 定义输入数据的sc ...

  4. sql读取excel数据_Python 读取 Excel 数据,并插入到MySQL

    说实话,个人不建议用Python来读取Excel进行入库操作,有条件的话,可以尝试用 ETL 工具,快速导入到MySQL中,或者也可使用 SQL 的导入工具进行. 写本文的目的在于:记录一下之前做过一 ...

  5. 【jQuery】在表单提交前触发事件(数据校验/执行计算等)

    方式1 给form加一个id,然后在js中可以这样触发提交事件: $("#FormID").submit(function(){// do something... }); 方式二 ...

  6. 33. 如何找出 SAP Fiori Launchpad 里点击 tile 之后,读取业务数据调用的是哪个 SAP 后台系统的 OData 服务

    文章目录 如何找到 SAP Fiori Launchpad tile 对应的 SAP UI5 应用名称 如何找到 SAP UI5 应用发出的 OData 请求明细 如何找到 SAP UI5 应用发送的 ...

  7. 大数据实时流计算详解

    开篇词-攻克实时流计算难点,掌握大数据未来 我曾任职于华为 2012 实验室高斯部门,负责实时分析型内存数据库 RTANA.华为公有云 RDS 服务的研发工作.目前,我专注于移动反欺诈解决方案的研发. ...

  8. 12_大数据之图计算_note

    大数据中的图计算 此笔记参考厦门大学-林子雨<<大数据技术原理与应用>> 1 图计算简介 1.1 图结构数据 许多大数据都是以大规模图或网络的形式呈现,如社交网络.传染病传播途 ...

  9. 使用Spark SQL读取Hive上的数据

    Spark SQL主要目的是使得用户可以在Spark上使用SQL,其数据源既可以是RDD,也可以是外部的数据源(比如Parquet.Hive.Json等).Spark SQL的其中一个分支就是Spar ...

最新文章

  1. javascript数据类型一览
  2. mysql函数包含的意思_MYSQL函数
  3. VTK:图表之GraphToPolyData
  4. 【Leetcode_easy】821. Shortest Distance to a Character
  5. spring mvc学习(25):Eclipse设置代码自动提示
  6. jdbc建立数据库连接的helloword
  7. iPhone 5用户们,苹果又喊你更新了,不然可能会变砖!
  8. Python基础教程笔记——列表和元组
  9. 6_Selenium Excel参数化
  10. Java多线程核心技术
  11. 计算机网络:网络传输介质
  12. j1900做网站服务器,j1900可以跟云服务器
  13. 不要告诉我你不知道这32个网站!
  14. 自动点击网页脚本---selenium库使用
  15. laravel视图 compact 循环遍历,if判断
  16. 函数——IIFE、作用域、函数调用、函数应用、闭包
  17. 宗宁:把企业做成IP,BOSS直聘对标Line
  18. C++ Primer Plus (第六版)编程练习记录(chapter10 对象和类)
  19. 神经网络适用于分类问题的最后一层-Softmax和交叉熵损失介绍及梯度推导
  20. Nifi博客之一:概述贴

热门文章

  1. 随机过程(random process)
  2. CentOS 7安装chrome
  3. python key=lambda函数_使用’key’和lambda表达式的python max函数
  4. 第157章 SQL函数 WEEK
  5. visual studio 2008微软教程
  6. 简易的java发邮件客户端
  7. vue table自定义样式
  8. 【个人练习3.11】7 c++练习题
  9. 计算机考研真题解析---计算机网络
  10. ArchSummit深圳2016优秀演讲公布,北京站专题抢先看