/usr/lib/jvm/java-8-openjdk-amd64/bin/java -Didea.launcher.port=7533 -Didea.launcher.bin.path=/home/aims/idea-IU-163.12024.16/bin -Dfile.encoding=UTF-8 -classpath /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/charsets.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/cldrdata.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/dnsns.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/icedtea-sound.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/jaccess.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/localedata.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/nashorn.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/sunec.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/sunjce_provider.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/sunpkcs11.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/ext/zipfs.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/jce.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/jsse.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/management-agent.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/resources.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar:/home/aims/spark/jars/activation-1.1.1.jar:/home/aims/spark/jars/antlr-2.7.7.jar:/home/aims/spark/jars/antlr-runtime-3.4.jar:/home/aims/spark/jars/antlr4-runtime-4.5.3.jar:/home/aims/spark/jars/aopalliance-1.0.jar:/home/aims/spark/jars/aopalliance-repackaged-2.4.0-b34.jar:/home/aims/spark/jars/apache-log4j-extras-1.2.17.jar:/home/aims/spark/jars/apacheds-i18n-2.0.0-M15.jar:/home/aims/spark/jars/apacheds-kerberos-codec-2.0.0-M15.jar:/home/aims/spark/jars/api-asn1-api-1.0.0-M20.jar:/home/aims/spark/jars/api-util-1.0.0-M20.jar:/home/aims/spark/jars/arpack_combined_all-0.1.jar:/home/aims/spark/jars/avro-1.7.7.jar:/home/aims/spark/jars/avro-ipc-1.7.7.jar:/home/aims/spark/jars/avro-mapred-1.7.7-hadoop2.jar:/home/aims/spark/jars/base64-2.3.8.jar:/home/aims/spark/jars/bcprov-jdk15on-1.51.jar:/home/aims/spark/jars/bonecp-0.8.0.RELEASE.jar:/home/aims/spark/jars/breeze-macros_2.11-0.12.jar:/home/aims/spark/jars/breeze_2.11-0.12.jar:/home/aims/spark/jars/calcite-avatica-1.2.0-incubating.jar:/home/aims/spark/jars/calcite-core-1.2.0-incubating.jar:/home/aims/spark/jars/calcite-linq4j-1.2.0-incubating.jar:/home/aims/spark/jars/chill-java-0.8.0.jar:/home/aims/spark/jars/chill_2.11-0.8.0.jar:/home/aims/spark/jars/commons-beanutils-1.7.0.jar:/home/aims/spark/jars/commons-beanutils-core-1.8.0.jar:/home/aims/spark/jars/commons-cli-1.2.jar:/home/aims/spark/jars/commons-codec-1.10.jar:/home/aims/spark/jars/commons-collections-3.2.2.jar:/home/aims/spark/jars/commons-compiler-3.0.0.jar:/home/aims/spark/jars/commons-compress-1.4.1.jar:/home/aims/spark/jars/commons-configuration-1.6.jar:/home/aims/spark/jars/commons-crypto-1.0.0.jar:/home/aims/spark/jars/commons-dbcp-1.4.jar:/home/aims/spark/jars/commons-digester-1.8.jar:/home/aims/spark/jars/commons-httpclient-3.1.jar:/home/aims/spark/jars/commons-io-2.4.jar:/home/aims/spark/jars/commons-lang-2.6.jar:/home/aims/spark/jars/commons-lang3-3.5.jar:/home/aims/spark/jars/commons-logging-1.1.3.jar:/home/aims/spark/jars/commons-math3-3.4.1.jar:/home/aims/spark/jars/commons-net-2.2.jar:/home/aims/spark/jars/commons-pool-1.5.4.jar:/home/aims/spark/jars/compress-lzf-1.0.3.jar:/home/aims/spark/jars/core-1.1.2.jar:/home/aims/spark/jars/curator-client-2.6.0.jar:/home/aims/spark/jars/curator-framework-2.6.0.jar:/home/aims/spark/jars/curator-recipes-2.6.0.jar:/home/aims/spark/jars/datanucleus-api-jdo-3.2.6.jar:/home/aims/spark/jars/datanucleus-core-3.2.10.jar:/home/aims/spark/jars/datanucleus-rdbms-3.2.9.jar:/home/aims/spark/jars/derby-10.12.1.1.jar:/home/aims/spark/jars/eigenbase-properties-1.1.5.jar:/home/aims/spark/jars/gson-2.2.4.jar:/home/aims/spark/jars/guava-14.0.1.jar:/home/aims/spark/jars/guice-3.0.jar:/home/aims/spark/jars/guice-servlet-3.0.jar:/home/aims/spark/jars/hadoop-annotations-2.7.3.jar:/home/aims/spark/jars/hadoop-auth-2.7.3.jar:/home/aims/spark/jars/hadoop-client-2.7.3.jar:/home/aims/spark/jars/hadoop-common-2.7.3.jar:/home/aims/spark/jars/hadoop-hdfs-2.7.3.jar:/home/aims/spark/jars/hadoop-mapreduce-client-app-2.7.3.jar:/home/aims/spark/jars/hadoop-mapreduce-client-common-2.7.3.jar:/home/aims/spark/jars/hadoop-mapreduce-client-core-2.7.3.jar:/home/aims/spark/jars/hadoop-mapreduce-client-jobclient-2.7.3.jar:/home/aims/spark/jars/hadoop-mapreduce-client-shuffle-2.7.3.jar:/home/aims/spark/jars/hadoop-yarn-api-2.7.3.jar:/home/aims/spark/jars/hadoop-yarn-client-2.7.3.jar:/home/aims/spark/jars/hadoop-yarn-common-2.7.3.jar:/home/aims/spark/jars/hadoop-yarn-server-common-2.7.3.jar:/home/aims/spark/jars/hadoop-yarn-server-web-proxy-2.7.3.jar:/home/aims/spark/jars/hive-beeline-1.2.1.spark2.jar:/home/aims/spark/jars/hive-cli-1.2.1.spark2.jar:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar:/home/aims/spark/jars/hive-jdbc-1.2.1.spark2.jar:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar:/home/aims/spark/jars/hk2-api-2.4.0-b34.jar:/home/aims/spark/jars/hk2-locator-2.4.0-b34.jar:/home/aims/spark/jars/hk2-utils-2.4.0-b34.jar:/home/aims/spark/jars/htrace-core-3.1.0-incubating.jar:/home/aims/spark/jars/httpclient-4.5.2.jar:/home/aims/spark/jars/httpcore-4.4.4.jar:/home/aims/spark/jars/ivy-2.4.0.jar:/home/aims/spark/jars/jackson-annotations-2.6.5.jar:/home/aims/spark/jars/jackson-core-2.6.5.jar:/home/aims/spark/jars/jackson-core-asl-1.9.13.jar:/home/aims/spark/jars/jackson-databind-2.6.5.jar:/home/aims/spark/jars/jackson-jaxrs-1.9.13.jar:/home/aims/spark/jars/jackson-mapper-asl-1.9.13.jar:/home/aims/spark/jars/jackson-module-paranamer-2.6.5.jar:/home/aims/spark/jars/jackson-module-scala_2.11-2.6.5.jar:/home/aims/spark/jars/jackson-xc-1.9.13.jar:/home/aims/spark/jars/janino-3.0.0.jar:/home/aims/spark/jars/java-xmlbuilder-1.0.jar:/home/aims/spark/jars/JavaEWAH-0.3.2.jar:/home/aims/spark/jars/javassist-3.18.1-GA.jar:/home/aims/spark/jars/javax.annotation-api-1.2.jar:/home/aims/spark/jars/javax.inject-1.jar:/home/aims/spark/jars/javax.inject-2.4.0-b34.jar:/home/aims/spark/jars/javax.servlet-api-3.1.0.jar:/home/aims/spark/jars/javax.ws.rs-api-2.0.1.jar:/home/aims/spark/jars/javolution-5.5.1.jar:/home/aims/spark/jars/jaxb-api-2.2.2.jar:/home/aims/spark/jars/jcl-over-slf4j-1.7.16.jar:/home/aims/spark/jars/jdo-api-3.0.1.jar:/home/aims/spark/jars/jersey-client-2.22.2.jar:/home/aims/spark/jars/jersey-common-2.22.2.jar:/home/aims/spark/jars/jersey-container-servlet-2.22.2.jar:/home/aims/spark/jars/jersey-container-servlet-core-2.22.2.jar:/home/aims/spark/jars/jersey-guava-2.22.2.jar:/home/aims/spark/jars/jersey-media-jaxb-2.22.2.jar:/home/aims/spark/jars/jersey-server-2.22.2.jar:/home/aims/spark/jars/jets3t-0.9.3.jar:/home/aims/spark/jars/jetty-6.1.26.jar:/home/aims/spark/jars/jetty-util-6.1.26.jar:/home/aims/spark/jars/jline-2.12.1.jar:/home/aims/spark/jars/joda-time-2.9.3.jar:/home/aims/spark/jars/jodd-core-3.5.2.jar:/home/aims/spark/jars/jpam-1.1.jar:/home/aims/spark/jars/json4s-ast_2.11-3.2.11.jar:/home/aims/spark/jars/json4s-core_2.11-3.2.11.jar:/home/aims/spark/jars/json4s-jackson_2.11-3.2.11.jar:/home/aims/spark/jars/jsp-api-2.1.jar:/home/aims/spark/jars/jsr305-1.3.9.jar:/home/aims/spark/jars/jta-1.1.jar:/home/aims/spark/jars/jtransforms-2.4.0.jar:/home/aims/spark/jars/jul-to-slf4j-1.7.16.jar:/home/aims/spark/jars/kryo-shaded-3.0.3.jar:/home/aims/spark/jars/leveldbjni-all-1.8.jar:/home/aims/spark/jars/libfb303-0.9.2.jar:/home/aims/spark/jars/libthrift-0.9.2.jar:/home/aims/spark/jars/log4j-1.2.17.jar:/home/aims/spark/jars/lz4-1.3.0.jar:/home/aims/spark/jars/mail-1.4.7.jar:/home/aims/spark/jars/mesos-1.0.0-shaded-protobuf.jar:/home/aims/spark/jars/metrics-core-3.1.2.jar:/home/aims/spark/jars/metrics-graphite-3.1.2.jar:/home/aims/spark/jars/metrics-json-3.1.2.jar:/home/aims/spark/jars/metrics-jvm-3.1.2.jar:/home/aims/spark/jars/minlog-1.3.0.jar:/home/aims/spark/jars/mx4j-3.0.2.jar:/home/aims/spark/jars/netty-3.8.0.Final.jar:/home/aims/spark/jars/netty-all-4.0.42.Final.jar:/home/aims/spark/jars/objenesis-2.1.jar:/home/aims/spark/jars/opencsv-2.3.jar:/home/aims/spark/jars/oro-2.0.8.jar:/home/aims/spark/jars/osgi-resource-locator-1.0.1.jar:/home/aims/spark/jars/paranamer-2.3.jar:/home/aims/spark/jars/parquet-column-1.8.1.jar:/home/aims/spark/jars/parquet-common-1.8.1.jar:/home/aims/spark/jars/parquet-encoding-1.8.1.jar:/home/aims/spark/jars/parquet-format-2.3.0-incubating.jar:/home/aims/spark/jars/parquet-hadoop-1.8.1.jar:/home/aims/spark/jars/parquet-hadoop-bundle-1.6.0.jar:/home/aims/spark/jars/parquet-jackson-1.8.1.jar:/home/aims/spark/jars/pmml-model-1.2.15.jar:/home/aims/spark/jars/pmml-schema-1.2.15.jar:/home/aims/spark/jars/protobuf-java-2.5.0.jar:/home/aims/spark/jars/py4j-0.10.4.jar:/home/aims/spark/jars/pyrolite-4.13.jar:/home/aims/spark/jars/RoaringBitmap-0.5.11.jar:/home/aims/spark/jars/scala-compiler-2.11.8.jar:/home/aims/spark/jars/scala-library-2.11.8.jar:/home/aims/spark/jars/scala-parser-combinators_2.11-1.0.4.jar:/home/aims/spark/jars/scala-reflect-2.11.8.jar:/home/aims/spark/jars/scala-xml_2.11-1.0.2.jar:/home/aims/spark/jars/scalap-2.11.8.jar:/home/aims/spark/jars/shapeless_2.11-2.0.0.jar:/home/aims/spark/jars/slf4j-api-1.7.16.jar:/home/aims/spark/jars/slf4j-log4j12-1.7.16.jar:/home/aims/spark/jars/snappy-0.2.jar:/home/aims/spark/jars/snappy-java-1.1.2.6.jar:/home/aims/spark/jars/spark-catalyst_2.11-2.1.0.jar:/home/aims/spark/jars/spark-core_2.11-2.1.0.jar:/home/aims/spark/jars/spark-graphx_2.11-2.1.0.jar:/home/aims/spark/jars/spark-hive-thriftserver_2.11-2.1.0.jar:/home/aims/spark/jars/spark-hive_2.11-2.1.0.jar:/home/aims/spark/jars/spark-launcher_2.11-2.1.0.jar:/home/aims/spark/jars/spark-mesos_2.11-2.1.0.jar:/home/aims/spark/jars/spark-mllib-local_2.11-2.1.0.jar:/home/aims/spark/jars/spark-mllib_2.11-2.1.0.jar:/home/aims/spark/jars/spark-network-common_2.11-2.1.0.jar:/home/aims/spark/jars/spark-network-shuffle_2.11-2.1.0.jar:/home/aims/spark/jars/spark-repl_2.11-2.1.0.jar:/home/aims/spark/jars/spark-sketch_2.11-2.1.0.jar:/home/aims/spark/jars/spark-sql_2.11-2.1.0.jar:/home/aims/spark/jars/spark-streaming_2.11-2.1.0.jar:/home/aims/spark/jars/spark-tags_2.11-2.1.0.jar:/home/aims/spark/jars/spark-unsafe_2.11-2.1.0.jar:/home/aims/spark/jars/spark-yarn_2.11-2.1.0.jar:/home/aims/spark/jars/spire-macros_2.11-0.7.4.jar:/home/aims/spark/jars/spire_2.11-0.7.4.jar:/home/aims/spark/jars/ST4-4.0.4.jar:/home/aims/spark/jars/stax-api-1.0-2.jar:/home/aims/spark/jars/stax-api-1.0.1.jar:/home/aims/spark/jars/stream-2.7.0.jar:/home/aims/spark/jars/stringtemplate-3.2.1.jar:/home/aims/spark/jars/super-csv-2.2.0.jar:/home/aims/spark/jars/univocity-parsers-2.2.1.jar:/home/aims/spark/jars/validation-api-1.1.0.Final.jar:/home/aims/spark/jars/xbean-asm5-shaded-4.4.jar:/home/aims/spark/jars/xercesImpl-2.9.1.jar:/home/aims/spark/jars/xmlenc-0.52.jar:/home/aims/spark/jars/xz-1.0.jar:/home/aims/spark/jars/zookeeper-3.4.6.jar:/home/aims/workspace/sparkhive/target/classes:/home/aims/.m2/repository/junit/junit/3.8.1/junit-3.8.1.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/ejml-0.23.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/javax.json.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/joda-time.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/jollyday.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/protobuf.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/slf4j-api.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/slf4j-simple.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/stanford-corenlp-3.7.0.jar:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/xom.jar:/home/aims/idea-IU-163.12024.16/lib/idea_rt.jar com.intellij.rt.execution.application.AppMain spark.sparkhive.queryhive
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/aims/spark/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/aims/Downloads/stanford-corenlp-full-2016-10-31/slf4j-simple.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [ch.qos.logback.classic.util.ContextSelectorStaticBinder]
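
The three bindings found above come from the application jar (apparently a shaded logback, given the ContextSelectorStaticBinder reported as the actual binding), slf4j-log4j12 from the Spark distribution, and slf4j-simple from the CoreNLP download; SLF4J picks one essentially arbitrarily. If it is ever unclear which binding won, a minimal check using only the standard SLF4J API is to print the concrete logger factory class (a sketch, nothing project-specific assumed):

    import org.slf4j.LoggerFactory;

    public class Slf4jBindingCheck {
        public static void main(String[] args) {
            // Prints the ILoggerFactory implementation class, which identifies
            // the binding SLF4J actually selected from this classpath.
            System.out.println(LoggerFactory.getILoggerFactory().getClass().getName());
        }
    }

The lasting fix is a classpath change, not a code change: keep exactly one StaticLoggerBinder on the classpath, e.g. by dropping slf4j-simple.jar from the CoreNLP jars and the binding packed into sentiment-analysis-1.0.jar.
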
17:13:56.292 [main] DEBUG org.apache.hadoop.util.Shell - Failed to detect a valid hadoop home directory
java.io.IOException: HADOOP_HOME or hadoop.home.dir are not set.
at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:326) [hadoop-common-2.7.3.jar:na]
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:351) [hadoop-common-2.7.3.jar:na]
at org.apache.hadoop.hive.conf.HiveConf$ConfVars.findHadoopBinary(HiveConf.java:2327) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
at org.apache.hadoop.hive.conf.HiveConf$ConfVars.<clinit>(HiveConf.java:365) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
at org.apache.hadoop.hive.conf.HiveConf.<clinit>(HiveConf.java:105) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
at java.lang.Class.forName0(Native Method) [na:1.8.0_121]
at java.lang.Class.forName(Class.java:348) [na:1.8.0_121]
at org.apache.spark.util.Utils$.classForName(Utils.scala:229) [spark-core_2.11-2.1.0.jar:2.1.0]
at org.apache.spark.sql.SparkSession$.hiveClassesArePresent(SparkSession.scala:991) [spark-sql_2.11-2.1.0.jar:2.1.0]
at org.apache.spark.sql.SparkSession$Builder.enableHiveSupport(SparkSession.scala:812) [spark-sql_2.11-2.1.0.jar:2.1.0]
at spark.sparkhive.queryhive.main(queryhive.java:36) [classes/:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_121]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_121]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_121]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_121]
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147) [idea_rt.jar:na]
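
The IOException above is thrown while Hive's HiveConf runs its static initializer, which Spark triggers from enableHiveSupport() at queryhive.java:36; with neither HADOOP_HOME nor hadoop.home.dir set, Hadoop's Shell class logs it at DEBUG and carries on. A minimal sketch of how the start of queryhive.main could set the property before any Spark/Hive class loads; the Hadoop path, app name, and master below are assumptions, not values taken from this log:

    import org.apache.spark.sql.SparkSession;

    public class queryhive {
        public static void main(String[] args) {
            // Assumed local Hadoop install; must be set before HiveConf's
            // static initializer runs (see the stack trace above).
            System.setProperty("hadoop.home.dir", "/home/aims/hadoop");

            // Reconstructed from the stack trace: the session is built with
            // Hive support enabled (queryhive.java:36).
            SparkSession spark = SparkSession.builder()
                    .appName("queryhive")   // assumed name
                    .master("local[*]")     // assumed; the log shows a local driver
                    .enableHiveSupport()
                    .getOrCreate();
        }
    }
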
17:13:56.441 [main] DEBUG org.apache.hadoop.util.Shell - setsid exited with exit code 0
17:13:56.826 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.1.0
17:13:57.029 [main] DEBUG o.a.h.m.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Rate of successful kerberos logins and latency (milliseconds)])
17:13:57.058 [main] DEBUG o.a.h.m.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Rate of failed kerberos logins and latency (milliseconds)])
17:13:57.059 [main] DEBUG o.a.h.m.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[GetGroups])
17:13:57.061 [main] DEBUG o.a.h.m.impl.MetricsSystemImpl - UgiMetrics, User and group related metrics
17:13:57.561 [main] DEBUG o.a.h.s.a.util.KerberosName - Kerberos krb5 configuration not found, setting default realm to empty
17:13:57.582 [main] DEBUG org.apache.hadoop.security.Groups - Creating new Groups object
17:13:57.592 [main] DEBUG o.a.hadoop.util.NativeCodeLoader - Trying to load the custom-built native-hadoop library...
17:13:57.593 [main] DEBUG o.a.hadoop.util.NativeCodeLoader - Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
17:13:57.593 [main] DEBUG o.a.hadoop.util.NativeCodeLoader - java.library.path=/home/aims/idea-IU-163.12024.16/bin::/usr/java/packages/lib/amd64:/usr/lib/x86_64-linux-gnu/jni:/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:/usr/lib/jni:/lib:/usr/lib
17:13:57.593 [main] WARN o.a.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
17:13:57.602 [main] DEBUG o.a.hadoop.util.PerformanceAdvisory - Falling back to shell based
17:13:57.602 [main] DEBUG o.a.h.s.JniBasedUnixGroupsMappingWithFallback - Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
17:13:57.790 [main] DEBUG org.apache.hadoop.security.Groups - Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000
17:13:57.800 [main] DEBUG o.a.h.security.UserGroupInformation - hadoop login
17:13:57.802 [main] DEBUG o.a.h.security.UserGroupInformation - hadoop login commit
17:13:57.823 [main] DEBUG o.a.h.security.UserGroupInformation - using local user:UnixPrincipal: aims
17:13:57.823 [main] DEBUG o.a.h.security.UserGroupInformation - Using user: "UnixPrincipal: aims" with name aims
17:13:57.824 [main] DEBUG o.a.h.security.UserGroupInformation - User entry: "aims"
17:13:57.831 [main] DEBUG o.a.h.security.UserGroupInformation - UGI loginUser:aims (auth:SIMPLE)
17:13:57.884 [main] WARN org.apache.spark.util.Utils - Your hostname, aims resolves to a loopback address: 127.0.1.1; using 10.0.0.3 instead (on interface wlp2s0)
17:13:57.885 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address
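
The two warnings above mean the hostname aims resolves to the loopback address 127.0.1.1 (a common /etc/hosts default on Debian/Ubuntu), so Spark falls back to 10.0.0.3 on wlp2s0. Exporting SPARK_LOCAL_IP before launch, as the warning suggests, is the environment-level fix; a hedged in-code alternative is to pin the driver address through configuration. The address below simply mirrors this log, and the class is hypothetical:

    import org.apache.spark.sql.SparkSession;

    public class DriverHostSketch {
        public static void main(String[] args) {
            // spark.driver.host pins the driver's advertised address so Spark
            // does not have to guess from a loopback-resolving hostname.
            // In Spark 2.1+, spark.driver.bindAddress additionally controls the
            // bind address (closer to what SPARK_LOCAL_IP does).
            SparkSession spark = SparkSession.builder()
                    .appName("driver-host-sketch")            // hypothetical name
                    .master("local[*]")
                    .config("spark.driver.host", "10.0.0.3")  // address from the log; adjust as needed
                    .getOrCreate();
            spark.stop();
        }
    }
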
17:13:57.934 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: aims
17:13:57.935 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: aims
17:13:57.935 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to:
17:13:57.936 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to:
17:13:57.937 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(aims); groups with view permissions: Set(); users with modify permissions: Set(aims); groups with modify permissions: Set()
17:13:57.952 [main] DEBUG org.apache.spark.SecurityManager - Created SSL options for fs: SSLOptions{enabled=false, keyStore=None, keyStorePassword=None, trustStore=None, trustStorePassword=None, protocol=None, enabledAlgorithms=Set()}
17:13:58.074 [main] DEBUG i.n.u.i.l.InternalLoggerFactory - Using SLF4J as the default logging framework
17:13:58.083 [main] DEBUG i.n.util.internal.PlatformDependent0 - java.nio.Buffer.address: available
17:13:58.084 [main] DEBUG i.n.util.internal.PlatformDependent0 - sun.misc.Unsafe.theUnsafe: available
17:13:58.086 [main] DEBUG i.n.util.internal.PlatformDependent0 - sun.misc.Unsafe.copyMemory: available
17:13:58.088 [main] DEBUG i.n.util.internal.PlatformDependent0 - direct buffer constructor: available
17:13:58.090 [main] DEBUG i.n.util.internal.PlatformDependent0 - java.nio.Bits.unaligned: available, true
17:13:58.092 [main] DEBUG i.n.util.internal.PlatformDependent0 - java.nio.DirectByteBuffer.<init>(long, int): available
17:13:58.094 [main] DEBUG io.netty.util.internal.Cleaner0 - java.nio.ByteBuffer.cleaner(): available
17:13:58.097 [main] DEBUG i.n.util.internal.PlatformDependent - Java version: 8
17:13:58.098 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.noUnsafe: false
17:13:58.098 [main] DEBUG i.n.util.internal.PlatformDependent - sun.misc.Unsafe: available
17:13:58.098 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.noJavassist: false
17:13:58.172 [main] DEBUG i.n.util.internal.PlatformDependent - Javassist: available
17:13:58.172 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.tmpdir: /tmp (java.io.tmpdir)
17:13:58.172 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.bitMode: 64 (sun.arch.data.model)
17:13:58.173 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.noPreferDirect: false
17:13:58.173 [main] DEBUG i.n.util.internal.PlatformDependent - io.netty.maxDirectMemory: 0 bytes
17:13:58.175 [main] DEBUG i.n.u.i.JavassistTypeParameterMatcherGenerator - Generated: io.netty.util.internal.__matchers__.org.apache.spark.network.protocol.MessageMatcher
17:13:58.181 [main] DEBUG i.n.u.i.JavassistTypeParameterMatcherGenerator - Generated: io.netty.util.internal.__matchers__.io.netty.buffer.ByteBufMatcher
17:13:58.197 [main] DEBUG i.n.c.MultithreadEventLoopGroup - -Dio.netty.eventLoopThreads: 8
17:13:58.224 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.noKeySetOptimization: false
17:13:58.224 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.selectorAutoRebuildThreshold: 512
17:13:58.228 [main] DEBUG i.n.util.internal.PlatformDependent - org.jctools-core.MpscChunkedArrayQueue: available
17:13:58.255 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numHeapArenas: 8
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numDirectArenas: 8
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.pageSize: 8192
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxOrder: 11
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.chunkSize: 16777216
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.tinyCacheSize: 512
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.smallCacheSize: 256
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.normalCacheSize: 64
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedBufferCapacity: 32768
17:13:58.256 [main] DEBUG i.n.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimInterval: 8192
17:13:58.306 [main] DEBUG i.n.util.internal.ThreadLocalRandom - -Dio.netty.initialSeedUniquifier: 0xc1294575454af5ab (took 0 ms)
17:13:58.356 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.allocator.type: unpooled
17:13:58.357 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.threadLocalDirectBufferSize: 65536
17:13:58.357 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.maxThreadLocalCharBufferSize: 16384
17:13:58.370 [main] DEBUG io.netty.util.NetUtil - Loopback interface: lo (lo, 127.0.0.1)
17:13:58.372 [main] DEBUG io.netty.util.NetUtil - /proc/sys/net/core/somaxconn: 128
17:13:58.398 [main] DEBUG o.a.s.network.server.TransportServer - Shuffle server started on port: 44072
17:13:58.409 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 44072.
17:13:58.410 [main] DEBUG org.apache.spark.SparkEnv - Using serializer: class org.apache.spark.serializer.JavaSerializer
17:13:58.554 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
17:13:58.558 [main] DEBUG o.a.s.MapOutputTrackerMasterEndpoint - init
17:13:58.611 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
17:13:58.616 [main] INFO o.a.s.s.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
17:13:58.619 [main] INFO o.a.s.s.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up
17:13:58.649 [main] INFO o.a.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-bfc86fe4-c9b6-403e-9c0e-ff6b56d94d5a
17:13:58.651 [main] DEBUG o.a.spark.storage.DiskBlockManager - Adding shutdown hook
17:13:58.653 [main] DEBUG o.a.spark.util.ShutdownHookManager - Adding shutdown hook
17:13:58.678 [main] INFO o.a.spark.storage.memory.MemoryStore - MemoryStore started with capacity 335.4 MB
17:13:58.757 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
17:13:58.759 [main] DEBUG o.a.s.s.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - init
17:13:58.795 [main] DEBUG org.apache.spark.SecurityManager - Created SSL options for ui: SSLOptions{enabled=false, keyStore=None, keyStorePassword=None, trustStore=None, trustStorePassword=None, protocol=None, enabledAlgorithms=Set()}
17:13:58.914 [main] DEBUG org.spark_project.jetty.util.log - Logging to Logger[org.spark_project.jetty.util.log] via org.spark_project.jetty.util.log.Slf4jLog
17:13:58.917 [main] INFO org.spark_project.jetty.util.log - Logging initialized @4111ms
17:13:58.931 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@60bdf15d{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@47da3952,AUTO}
17:13:58.935 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@47da3952 added {org.apache.spark.ui.JettyUtils$$anon$3-4fcee388@5d14a26e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.937 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@47da3952 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-4fcee388,POJO}
17:13:58.938 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@303e3593{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@4ef27d66,AUTO}
17:13:58.939 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@4ef27d66 added {org.apache.spark.ui.JettyUtils$$anon$3-362a019c@b61a6609==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.939 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@4ef27d66 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-362a019c,POJO}
17:13:58.941 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@3f23a3a0{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@5ab14cb9,AUTO}
17:13:58.941 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5ab14cb9 added {org.apache.spark.ui.JettyUtils$$anon$3-5fb97279@c0bfff91==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.942 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5ab14cb9 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-5fb97279,POJO}
17:13:58.942 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@439a8f59{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@61861a29,AUTO}
17:13:58.942 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@61861a29 added {org.apache.spark.ui.JettyUtils$$anon$3-31024624@a79d1f10==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.943 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@61861a29 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-31024624,POJO}
17:13:58.956 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@593e824f{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@72ccd81a,AUTO}
17:13:58.956 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@72ccd81a added {org.apache.spark.ui.JettyUtils$$anon$3-6d8792db@770deb30==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.957 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@72ccd81a added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-6d8792db,POJO}
17:13:58.957 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@64bc21ac{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@493dfb8e,AUTO}
17:13:58.957 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@493dfb8e added {org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb@4e78aaf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.958 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@493dfb8e added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb,POJO}
17:13:58.958 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@9d157ff{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@2f162cc0,AUTO}
17:13:58.958 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2f162cc0 added {org.apache.spark.ui.JettyUtils$$anon$3-5df417a7@5d7dd3a5==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.959 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2f162cc0 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-5df417a7,POJO}
17:13:58.959 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@7c041b41{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@7f69d591,AUTO}
17:13:58.959 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@7f69d591 added {org.apache.spark.ui.JettyUtils$$anon$3-61078690@df88ed83==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.960 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@7f69d591 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-61078690,POJO}
17:13:58.960 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@403132fc{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@71c5b236,AUTO}
17:13:58.960 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@71c5b236 added {org.apache.spark.ui.JettyUtils$$anon$3-2cab9998@eaf7b57d==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.960 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@71c5b236 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-2cab9998,POJO}
17:13:58.961 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@2f7a7219{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@669513d8,AUTO}
17:13:58.961 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@669513d8 added {org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e@973e2b03==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.961 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@669513d8 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e,POJO}
17:13:58.967 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@664a9613{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@5118388b,AUTO}
17:13:58.967 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5118388b added {org.apache.spark.ui.JettyUtils$$anon$3-15a902e7@ff71f2fc==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.967 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5118388b added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-15a902e7,POJO}
17:13:58.967 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@7876d598{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b,AUTO}
17:13:58.968 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b added {org.apache.spark.ui.JettyUtils$$anon$3-5af28b27@beb2bd33==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.968 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-5af28b27,POJO}
17:13:58.968 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@4985cbcb{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@72f46e16,AUTO}
17:13:58.969 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@72f46e16 added {org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc@be32e55==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.969 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@72f46e16 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc,POJO}
17:13:58.969 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@332a7fce{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@549621f3,AUTO}
17:13:58.970 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@549621f3 added {org.apache.spark.ui.JettyUtils$$anon$3-54361a9@2a2ca7fb==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.970 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@549621f3 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-54361a9,POJO}
17:13:58.974 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@293bb8a5{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@2416a51,AUTO}
17:13:58.974 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2416a51 added {org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba@26b4dc66==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.974 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2416a51 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba,POJO}
17:13:58.974 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@6e9319f{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@72e34f77,AUTO}
17:13:58.975 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@72e34f77 added {org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098@c3d05377==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.975 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@72e34f77 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098,POJO}
17:13:58.979 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@33617539{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@2c177f9e,AUTO}
17:13:58.979 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2c177f9e added {org.apache.spark.ui.JettyUtils$$anon$3-5db4c359@56c11761==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.979 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2c177f9e added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-5db4c359,POJO}
17:13:58.979 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@209775a9{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@18e7143f,AUTO}
17:13:58.979 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@18e7143f added {org.apache.spark.ui.JettyUtils$$anon$3-f9b7332@555d017e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.980 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@18e7143f added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-f9b7332,POJO}
17:13:58.980 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@6fefce9e{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@4f8969b0,AUTO}
17:13:58.980 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@4f8969b0 added {org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190@5386a2cf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.980 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@4f8969b0 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190,POJO}
17:13:58.980 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@192f2f27{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@8a589a2,AUTO}
17:13:58.980 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@8a589a2 added {org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef@af24c98b==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:13:58.981 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@8a589a2 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef,POJO}
17:13:58.981 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@b672aa8{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@2fab4aff,AUTO}
17:13:59.003 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2fab4aff added {org.spark_project.jetty.servlet.DefaultServlet-a77614d@22ce533d==org.spark_project.jetty.servlet.DefaultServlet,-1,true,AUTO}
17:13:59.003 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@2fab4aff added {[/]=>org.spark_project.jetty.servlet.DefaultServlet-a77614d,POJO}
17:13:59.004 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@a1217f9{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@3bde62ff,AUTO}
17:13:59.004 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@3bde62ff added {org.apache.spark.ui.JettyUtils$$anon$4-523424b5@a5c8f39e==org.apache.spark.ui.JettyUtils$$anon$4,-1,true,AUTO}
17:13:59.004 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@3bde62ff added {[/]=>org.apache.spark.ui.JettyUtils$$anon$4-523424b5,POJO}
17:13:59.005 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@791cbf87{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@a7e2d9d,AUTO}
17:13:59.010 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@a7e2d9d added {org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false,AUTO}
17:13:59.011 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@a7e2d9d added {[/*]=>org.glassfish.jersey.servlet.ServletContainer-7de0c6ae,POJO}
17:13:59.013 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@4c36250e{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@21526f6c,AUTO}
17:13:59.013 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@21526f6c added {org.apache.spark.ui.JettyUtils$$anon$4-49f5c307@75ee116==org.apache.spark.ui.JettyUtils$$anon$4,-1,true,AUTO}
17:13:59.013 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@21526f6c added {[/]=>org.apache.spark.ui.JettyUtils$$anon$4-49f5c307,POJO}
17:13:59.014 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@66ea1466{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@1601e47,AUTO}
17:13:59.015 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@1601e47 added {org.apache.spark.ui.JettyUtils$$anon$4-3bffddff@1b10e45a==org.apache.spark.ui.JettyUtils$$anon$4,-1,true,AUTO}
17:13:59.015 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@1601e47 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$4-3bffddff,POJO}
17:13:59.058 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e mime types IncludeExclude@552518c3{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@1a69561c,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@59aa20b3}
17:13:59.059 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e added {o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,null},AUTO}
17:13:59.060 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148 mime types IncludeExclude@4b21844c{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@1b28f282,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@138fe6ec}
17:13:59.061 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148 added {o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,null},AUTO}
17:13:59.061 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4 mime types IncludeExclude@19b30c92{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@455351c4,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@29876704}
17:13:59.061 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4 added {o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,null},AUTO}
17:13:59.063 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c mime types IncludeExclude@16423501{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@4efcf8a,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@7a138fc5}
17:13:59.063 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c added {o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,null},AUTO}
17:13:59.064 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b mime types IncludeExclude@307765b4{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@4a9e6faf,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@2c95ac9e}
17:13:59.064 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b added {o.s.j.s.ServletContextHandler@593e824f{/stages,null,null},AUTO}
17:13:59.064 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b mime types IncludeExclude@459f7aa3{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@7cc586a8,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@7db534f2}
17:13:59.065 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b added {o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,null},AUTO}
17:13:59.065 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b mime types IncludeExclude@7a56812e{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@2a76b80a,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@7eb01b12}
17:13:59.066 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b added {o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,null},AUTO}
17:13:59.067 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39 mime types IncludeExclude@e6516e{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@6de54b40,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@43ed0ff3}
17:13:59.067 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39 added {o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,null},AUTO}
17:13:59.068 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2 mime types IncludeExclude@a50b09c{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@4da855dd,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@6691490c}
17:13:59.068 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2 added {o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,null},AUTO}
17:13:59.068 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7 mime types IncludeExclude@2e5c7f0b{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@21d5c1a0,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@4de025bf}
17:13:59.068 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7 added {o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,null},AUTO}
17:13:59.069 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3 mime types IncludeExclude@1eef9aef{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@11389053,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@5db99216}
17:13:59.069 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3 added {o.s.j.s.ServletContextHandler@664a9613{/storage,null,null},AUTO}
17:13:59.069 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999 mime types IncludeExclude@5c1bd44c{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@9f46d94,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@18cc679e}
17:13:59.070 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999 added {o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,null},AUTO}
17:13:59.070 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf mime types IncludeExclude@2c4ca0f9{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@67ef029,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@7df587ef}
17:13:59.070 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf added {o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,null},AUTO}
17:13:59.071 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e mime types IncludeExclude@2755d705{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@56db847e,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@740abb5}
17:13:59.071 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e added {o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,null},AUTO}
17:13:59.071 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a mime types IncludeExclude@5fe8b721{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@551a20d6,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@578524c3}
17:13:59.071 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a added {o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,null},AUTO}
17:13:59.072 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546 mime types IncludeExclude@7e094740{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@7a11c4c7,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@4cc547a}
17:13:59.072 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546 added {o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,null},AUTO}
17:13:59.072 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920 mime types IncludeExclude@4152d38d{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@3591009c,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@5398edd0}
17:13:59.072 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920 added {o.s.j.s.ServletContextHandler@33617539{/executors,null,null},AUTO}
17:13:59.073 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a mime types IncludeExclude@5cc5b667{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@61edc883,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@758f4f03}
17:13:59.073 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a added {o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,null},AUTO}
17:13:59.073 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a mime types IncludeExclude@6928f576{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@660e9100,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@69f63d95}
17:13:59.073 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a added {o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,null},AUTO}
17:13:59.074 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff mime types IncludeExclude@27e0f2f5{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@3574e198,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@6db66836}
17:13:59.074 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff added {o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,null},AUTO}
17:13:59.074 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2 mime types IncludeExclude@2de366bb{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@3f093abe,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@61a002b1}
17:13:59.074 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2 added {o.s.j.s.ServletContextHandler@b672aa8{/static,null,null},AUTO}
17:13:59.075 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d mime types IncludeExclude@780ec4a5{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@e24ddd0,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@6f70f32f}
17:13:59.075 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d added {o.s.j.s.ServletContextHandler@a1217f9{/,null,null},AUTO}
17:13:59.076 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1 mime types IncludeExclude@5aabbb29{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@72c927f1,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@1ac85b0c}
17:13:59.076 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1 added {o.s.j.s.ServletContextHandler@791cbf87{/api,null,null},AUTO}
17:13:59.077 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a mime types IncludeExclude@3aa3193a{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@1ee4730,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@59a67c3a}
17:13:59.077 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a added {o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,null},AUTO}
17:13:59.078 [main] DEBUG o.s.jetty.servlets.gzip.GzipHandler - org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b mime types IncludeExclude@724bade8{i=[],ip=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@16fb356,e=[image/ief, image/vnd.wap.wbmp, image/jpeg, application/bzip2, image/x-portable-graymap, image/gif, image/x-icon, audio/midi, video/x-msvideo, image/x-xbitmap, application/x-rar-compressed, image/x-portable-bitmap, image/x-rgb, image/x-cmu-raster, application/gzip, audio/x-wav, audio/basic, audio/x-pn-realaudio, application/compress, audio/x-aiff, video/x.ms.asx, video/x.ms.asf, image/png, video/vnd.rn-realvideo, image/x-xwindowdump, video/x-sgi-movie, audio/mpeg, video/mpeg, image/x-portable-pixmap, image/tiff, image/x-portable-anymap, image/x-xpixmap, application/zip, video/quicktime],ep=org.spark_project.jetty.util.IncludeExclude$SetContainsPredicate@6bc248ed}
17:13:59.078 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b added {o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,null},AUTO}
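
Each pair of lines above shows the Spark UI bootstrap wrapping one route's ServletContextHandler in a (shaded) Jetty GzipHandler. In the IncludeExclude dump, i=[] is the empty include set and e=[...] is the exclude set: mime types that are already compressed (images, audio, video, zip/gzip archives), which the handler serves as-is rather than waste CPU re-gzipping. A minimal sketch of the same wrapping, using stock (non-shaded) Jetty 9.4 API rather than Spark's internal JettyUtils — the Jetty calls are real, but the wiring and class name below are illustrative:

    import org.eclipse.jetty.server.handler.gzip.GzipHandler;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class GzipWrapSketch {
        // Wrap a UI context in a GzipHandler, as the log shows for each route.
        public static GzipHandler wrap(String contextPath) {
            ServletContextHandler ctx = new ServletContextHandler();
            ctx.setContextPath(contextPath);            // e.g. "/storage/rdd"

            GzipHandler gzip = new GzipHandler();
            // A few mime types from the log's e=[...] exclude set: already
            // compressed, so gzipping them again yields no size win.
            gzip.addExcludedMimeTypes("image/jpeg", "image/png",
                    "application/zip", "application/gzip", "video/mpeg");
            gzip.setHandler(ctx);                       // gzip -> context -> servlet
            return gzip;
        }
    }
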
17:13:59.091 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.Server@514eedd8 added {SparkUI{STOPPED,8<=0<=200,i=0,q=0},AUTO}
17:13:59.107 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - HttpConnectionFactory@5910de75{HTTP/1.1} added {HttpConfiguration@4108fa66{32768/8192,8192/8192,https://:0,[]},POJO}
17:13:59.111 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{null}{0.0.0.0:0} added {org.spark_project.jetty.server.Server@514eedd8,UNMANAGED}
17:13:59.111 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{null}{0.0.0.0:0} added {SparkUI{STOPPED,8<=0<=200,i=0,q=0},AUTO}
17:13:59.112 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{null}{0.0.0.0:0} added {org.spark_project.jetty.util.thread.ScheduledExecutorScheduler@655a5d9c,AUTO}
17:13:59.112 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{null}{0.0.0.0:0} added {org.spark_project.jetty.io.ArrayByteBufferPool@1494b84d,POJO}
17:13:59.112 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{null}{0.0.0.0:0} added {HttpConnectionFactory@5910de75{HTTP/1.1},AUTO}
17:13:59.115 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:0} added {org.spark_project.jetty.server.ServerConnector$ServerConnectorManager@1df98368,MANAGED}
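
Here the HTTP connector is assembled: the ServerConnector is handed the Server, the SparkUI thread pool, a scheduler, a byte-buffer pool, and an HTTP/1.1 connection factory. It still prints {0.0.0.0:0} because no port has been set yet; by the time it is added to the Server (the 0.0.0.0:4040 line further down) Spark has set the UI port. A rough equivalent with stock Jetty API, as a sketch only:

    import org.eclipse.jetty.server.HttpConnectionFactory;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;

    public class ConnectorSketch {
        public static void main(String[] args) throws Exception {
            Server server = new Server();
            // Connector with an explicit HTTP/1.1 factory, as in the log.
            ServerConnector http = new ServerConnector(server, new HttpConnectionFactory());
            http.setPort(4040);   // 0 would mean an OS-assigned ephemeral port
            server.addConnector(http);
            server.start();
            System.out.println("listening on " + http.getLocalPort());
            server.stop();
        }
    }
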
17:13:59.117 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e] added {org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,AUTO}
17:13:59.117 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148] added {org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,AUTO}
17:13:59.117 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4] added {org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,AUTO}
17:13:59.117 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c] added {org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,AUTO}
17:13:59.117 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b] added {org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,AUTO}
17:13:59.118 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b] added {org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,AUTO}
17:13:59.118 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b] added {org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,AUTO}
17:13:59.118 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39] added {org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,AUTO}
17:13:59.118 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2] added {org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,AUTO}
17:13:59.118 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7] added {org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,AUTO}
17:13:59.119 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3] added {org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,AUTO}
17:13:59.119 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999] added {org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,AUTO}
17:13:59.119 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf] added {org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,AUTO}
17:13:59.119 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e] added {org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,AUTO}
17:13:59.119 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a] added {org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,AUTO}
17:13:59.120 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546] added {org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,AUTO}
17:13:59.120 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920] added {org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,AUTO}
17:13:59.120 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a] added {org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,AUTO}
17:13:59.120 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a] added {org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,AUTO}
17:13:59.121 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff] added {org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,AUTO}
17:13:59.121 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2] added {org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,AUTO}
17:13:59.121 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d] added {org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,AUTO}
17:13:59.121 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1] added {org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,AUTO}
17:13:59.122 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a] added {org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,AUTO}
17:13:59.122 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b] added {org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,AUTO}
17:13:59.124 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.Server@514eedd8 added {ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040},AUTO}
17:13:59.126 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.Server@514eedd8 added {org.spark_project.jetty.server.handler.ErrorHandler@641856,AUTO}
17:13:59.126 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.Server@514eedd8 added {org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b],AUTO}
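
The connector lands on 4040, Spark's default web UI port (spark.ui.port); if that port is taken, Spark retries successive ports up to spark.port.maxRetries. To run the UI elsewhere, the port can be set in application code (sketch below, using the public Spark Java API; app name and port value are examples) or via spark-submit --conf spark.ui.port=4041:

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public class UiPortSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf()
                    .setAppName("SentimentAnalysis")  // example name
                    .setMaster("local[*]")
                    .set("spark.ui.port", "4041");    // default is 4040
            JavaSparkContext sc = new JavaSparkContext(conf);
            // ... job code ...
            sc.stop();
        }
    }
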
17:13:59.126 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.server.Server@514eedd8
17:13:59.130 [main] INFO o.spark_project.jetty.server.Server - jetty-9.2.z-SNAPSHOT
17:13:59.145 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.server.Server@514eedd8
17:13:59.145 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting SparkUI{STOPPED,8<=0<=200,i=0,q=0}
17:13:59.149 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4343ms SparkUI{STARTED,8<=8<=200,i=8,q=0}
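
SparkUI{STARTED,8<=8<=200,i=8,q=0} is Jetty's QueuedThreadPool toString: minThreads <= currentThreads <= maxThreads, then the idle-thread count and the queued-job count. The pool has started its 8 minimum threads, all idle, with nothing queued (compare the earlier STOPPED form, 8<=0<=200). Equivalent stock-Jetty construction, as a sketch rather than Spark's actual code:

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.util.thread.QueuedThreadPool;

    public class PoolSketch {
        public static void main(String[] args) throws Exception {
            QueuedThreadPool pool = new QueuedThreadPool(200, 8); // max, min
            pool.setName("SparkUI");   // the name shown in the log lines
            Server server = new Server(pool);
            server.start();            // spins up the 8 minimum threads
            server.stop();
        }
    }
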
17:13:59.149 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.server.handler.ErrorHandler@641856
17:13:59.149 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.server.handler.ErrorHandler@641856
17:13:59.149 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4344ms org.spark_project.jetty.server.handler.ErrorHandler@641856
17:13:59.149 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b]
17:13:59.152 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - ->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,[o.s.j.s.ServletContextHandler@a1217f9{/,null,null}]}]
17:13:59.152 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,[o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,null}]}]
17:13:59.152 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,[o.s.j.s.ServletContextHandler@664a9613{/storage,null,null}]}]
17:13:59.152 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,[o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,null}]}]
17:13:59.152 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - api->[{org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,[o.s.j.s.ServletContextHandler@791cbf87{/api,null,null}]}]
17:13:59.153 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,[o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,null}]}]
17:13:59.153 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool->[{org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,[o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,null}]}]
17:13:59.153 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,[o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,null}]}]
17:13:59.153 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static->[{org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,[o.s.j.s.ServletContextHandler@b672aa8{/static,null,null}]}]
17:13:59.153 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,[o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,null}]}]
17:13:59.154 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,[o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,null}]}]
17:13:59.154 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,[o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,null}]}]
17:13:59.154 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,[o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,null}]}]
17:13:59.154 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,[o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,null}]}]
17:13:59.154 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs->[{org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,[o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,null}]}]
17:13:59.154 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,[o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,null}]}]
17:13:59.154 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,[o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,null}]}]
17:13:59.155 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,[o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,null}]}]
17:13:59.155 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,[o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,null}]}]
17:13:59.155 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,[o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,null}]}]
17:13:59.155 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment->[{org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,[o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,null}]}]
17:13:59.155 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages->[{org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,[o.s.j.s.ServletContextHandler@593e824f{/stages,null,null}]}]
17:13:59.155 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,[o.s.j.s.ServletContextHandler@33617539{/executors,null,null}]}]
17:13:59.156 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,[o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,null}]}]
17:13:59.156 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump->[{org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,[o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,null}]}]
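
When the ContextHandlerCollection starts, it dumps the routing map it has just built: each URL context path points at the gzip-wrapped ServletContextHandler registered for it, so an incoming request is dispatched to the handler whose context path matches the request's prefix. The same layout in stock Jetty, as an illustrative sketch (paths mirror a few of the Spark UI routes above):

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.ContextHandlerCollection;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class RoutingSketch {
        public static void main(String[] args) throws Exception {
            ContextHandlerCollection contexts = new ContextHandlerCollection();
            for (String path : new String[]{"/", "/jobs", "/jobs/json",
                    "/stages", "/storage/rdd", "/environment", "/executors"}) {
                ServletContextHandler ctx = new ServletContextHandler();
                ctx.setContextPath(path);
                contexts.addHandler(ctx); // request routed by its context path
            }
            Server server = new Server(4040);
            server.setHandler(contexts);
            // server.start(); // would build the path map dumped in the log
        }
    }
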
17:13:59.156 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b]
17:13:59.156 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e
17:13:59.156 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e
17:13:59.156 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,null}
17:13:59.158 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,STARTING}
17:13:59.158 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@47da3952
17:13:59.163 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-4fcee388 from default=false
17:13:59.165 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.165 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.165 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.165 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-4fcee388@5d14a26e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.165 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-4fcee388=org.apache.spark.ui.JettyUtils$$anon$3-4fcee388@5d14a26e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.165 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@47da3952
17:13:59.166 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4360ms org.spark_project.jetty.servlet.ServletHandler@47da3952
17:13:59.166 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-4fcee388@5d14a26e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.170 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4364ms org.apache.spark.ui.JettyUtils$$anon$3-4fcee388@5d14a26e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.173 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@5c77053b for org.apache.spark.ui.JettyUtils$$anon$3-4fcee388
17:13:59.173 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}
17:13:59.175 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4369ms o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}
17:13:59.175 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4369ms org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e
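
This completes the startup of the first context, /jobs, and the same block repeats below for every remaining route: the ServletHandler maps path "/" to a single Spark servlet (the anonymous org.apache.spark.ui.JettyUtils$$anon$3 class), the holder starts, Servlet.init runs, and the context is reported AVAILABLE. Registering one servlet at "/" in a context looks roughly like this in stock Jetty, with an anonymous servlet standing in for Spark's:

    import java.io.IOException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.servlet.ServletContextHandler;
    import org.eclipse.jetty.servlet.ServletHolder;

    public class ServletSketch {
        public static void main(String[] args) throws Exception {
            ServletContextHandler jobs = new ServletContextHandler();
            jobs.setContextPath("/jobs");
            jobs.addServlet(new ServletHolder(new HttpServlet() {
                @Override
                protected void doGet(HttpServletRequest req, HttpServletResponse resp)
                        throws IOException {
                    resp.getWriter().println("jobs page"); // placeholder body
                }
            }), "/"); // the same "/" path mapping the log reports per context

            Server server = new Server(4040);
            server.setHandler(jobs);
            server.start();   // triggers ServletHolder start + Servlet.init, as logged
            server.join();
        }
    }
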
17:13:59.175 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148
17:13:59.175 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148
17:13:59.175 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,null}
17:13:59.175 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,STARTING}
17:13:59.175 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@4ef27d66
17:13:59.175 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-362a019c from default=false
17:13:59.175 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.175 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.175 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.175 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-362a019c@b61a6609==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.176 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-362a019c=org.apache.spark.ui.JettyUtils$$anon$3-362a019c@b61a6609==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.176 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@4ef27d66
17:13:59.176 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4370ms org.spark_project.jetty.servlet.ServletHandler@4ef27d66
17:13:59.176 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-362a019c@b61a6609==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.176 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4370ms org.apache.spark.ui.JettyUtils$$anon$3-362a019c@b61a6609==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.176 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@287f94b1 for org.apache.spark.ui.JettyUtils$$anon$3-362a019c
17:13:59.176 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}
17:13:59.176 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4371ms o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}
17:13:59.176 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4371ms org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148
17:13:59.176 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4
17:13:59.176 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4
17:13:59.177 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,null}
17:13:59.177 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,STARTING}
17:13:59.177 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@5ab14cb9
17:13:59.177 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-5fb97279 from default=false
17:13:59.177 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.177 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.177 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.177 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-5fb97279@c0bfff91==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.177 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-5fb97279=org.apache.spark.ui.JettyUtils$$anon$3-5fb97279@c0bfff91==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.177 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@5ab14cb9
17:13:59.177 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4372ms org.spark_project.jetty.servlet.ServletHandler@5ab14cb9
17:13:59.178 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-5fb97279@c0bfff91==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.178 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4372ms org.apache.spark.ui.JettyUtils$$anon$3-5fb97279@c0bfff91==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.178 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@5489c777 for org.apache.spark.ui.JettyUtils$$anon$3-5fb97279
17:13:59.178 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}
17:13:59.178 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4372ms o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}
17:13:59.178 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4373ms org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4
17:13:59.178 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c
17:13:59.178 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c
17:13:59.178 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,null}
17:13:59.179 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,STARTING}
17:13:59.179 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@61861a29
17:13:59.179 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-31024624 from default=false
17:13:59.179 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.179 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.179 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.179 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-31024624@a79d1f10==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.179 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-31024624=org.apache.spark.ui.JettyUtils$$anon$3-31024624@a79d1f10==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.179 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@61861a29
17:13:59.180 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4375ms org.spark_project.jetty.servlet.ServletHandler@61861a29
17:13:59.180 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-31024624@a79d1f10==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.181 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4375ms org.apache.spark.ui.JettyUtils$$anon$3-31024624@a79d1f10==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.181 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@48f5bde6 for org.apache.spark.ui.JettyUtils$$anon$3-31024624
17:13:59.181 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}
17:13:59.181 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4375ms o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}
17:13:59.181 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4375ms org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c
17:13:59.181 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b
17:13:59.181 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b
17:13:59.181 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@593e824f{/stages,null,null}
17:13:59.181 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@593e824f{/stages,null,STARTING}
17:13:59.181 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@72ccd81a
17:13:59.182 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-6d8792db from default=false
17:13:59.182 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.182 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.182 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.184 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-6d8792db@770deb30==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.185 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-6d8792db=org.apache.spark.ui.JettyUtils$$anon$3-6d8792db@770deb30==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.185 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@72ccd81a
17:13:59.185 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4380ms org.spark_project.jetty.servlet.ServletHandler@72ccd81a
17:13:59.185 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-6d8792db@770deb30==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.187 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4382ms org.apache.spark.ui.JettyUtils$$anon$3-6d8792db@770deb30==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.188 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@5149f008 for org.apache.spark.ui.JettyUtils$$anon$3-6d8792db
17:13:59.188 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}
17:13:59.188 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4382ms o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}
17:13:59.188 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4382ms org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b
17:13:59.188 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b
17:13:59.188 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b
17:13:59.188 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,null}
17:13:59.188 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,STARTING}
17:13:59.188 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@493dfb8e
17:13:59.189 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb from default=false
17:13:59.189 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.191 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.191 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.191 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb@4e78aaf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.191 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb=org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb@4e78aaf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.191 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@493dfb8e
17:13:59.191 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4386ms org.spark_project.jetty.servlet.ServletHandler@493dfb8e
17:13:59.191 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb@4e78aaf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.191 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4386ms org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb@4e78aaf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.191 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@158d255c for org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb
17:13:59.192 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}
17:13:59.192 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4386ms o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}
17:13:59.192 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4386ms org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b
17:13:59.192 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b
17:13:59.192 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b
17:13:59.192 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,null}
17:13:59.192 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,STARTING}
17:13:59.192 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@2f162cc0
17:13:59.193 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-5df417a7 from default=false
17:13:59.195 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.195 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.195 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.196 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-5df417a7@5d7dd3a5==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.196 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-5df417a7=org.apache.spark.ui.JettyUtils$$anon$3-5df417a7@5d7dd3a5==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.196 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@2f162cc0
17:13:59.196 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4390ms org.spark_project.jetty.servlet.ServletHandler@2f162cc0
17:13:59.196 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-5df417a7@5d7dd3a5==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.197 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4391ms org.apache.spark.ui.JettyUtils$$anon$3-5df417a7@5d7dd3a5==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.197 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@327120c8 for org.apache.spark.ui.JettyUtils$$anon$3-5df417a7
17:13:59.197 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}
17:13:59.197 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4391ms o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}
17:13:59.197 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4392ms org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b
17:13:59.198 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39
17:13:59.198 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39
17:13:59.198 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,null}
17:13:59.198 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,STARTING}
17:13:59.199 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@7f69d591
17:13:59.199 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-61078690 from default=false
17:13:59.199 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.199 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.199 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.199 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-61078690@df88ed83==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.199 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-61078690=org.apache.spark.ui.JettyUtils$$anon$3-61078690@df88ed83==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.201 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@7f69d591
17:13:59.201 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4396ms org.spark_project.jetty.servlet.ServletHandler@7f69d591
17:13:59.202 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-61078690@df88ed83==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.204 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4399ms org.apache.spark.ui.JettyUtils$$anon$3-61078690@df88ed83==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.206 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@2b5cb9b2 for org.apache.spark.ui.JettyUtils$$anon$3-61078690
17:13:59.206 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}
17:13:59.206 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4401ms o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}
17:13:59.206 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4401ms org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39
17:13:59.206 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2
17:13:59.206 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2
17:13:59.206 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,null}
17:13:59.206 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,STARTING}
17:13:59.207 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@71c5b236
17:13:59.207 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-2cab9998 from default=false
17:13:59.207 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.207 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.207 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.207 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-2cab9998@eaf7b57d==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.207 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-2cab9998=org.apache.spark.ui.JettyUtils$$anon$3-2cab9998@eaf7b57d==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.207 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@71c5b236
17:13:59.207 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4402ms org.spark_project.jetty.servlet.ServletHandler@71c5b236
17:13:59.207 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-2cab9998@eaf7b57d==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.207 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4402ms org.apache.spark.ui.JettyUtils$$anon$3-2cab9998@eaf7b57d==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.207 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@ecf9049 for org.apache.spark.ui.JettyUtils$$anon$3-2cab9998
17:13:59.207 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}
17:13:59.208 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4402ms o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}
17:13:59.208 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4402ms org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2
17:13:59.208 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7
17:13:59.208 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7
17:13:59.208 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,null}
17:13:59.208 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,STARTING}
17:13:59.208 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@669513d8
17:13:59.208 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e from default=false
17:13:59.208 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.208 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.208 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.208 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e@973e2b03==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.208 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e=org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e@973e2b03==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.208 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@669513d8
17:13:59.208 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4403ms org.spark_project.jetty.servlet.ServletHandler@669513d8
17:13:59.209 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e@973e2b03==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.209 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4403ms org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e@973e2b03==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.209 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@2970a5bc for org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e
17:13:59.209 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}
17:13:59.209 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4403ms o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}
17:13:59.209 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4403ms org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7
17:13:59.209 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3
17:13:59.209 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3
17:13:59.209 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@664a9613{/storage,null,null}
17:13:59.209 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@664a9613{/storage,null,STARTING}
17:13:59.209 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@5118388b
17:13:59.209 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-15a902e7 from default=false
17:13:59.209 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.209 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.209 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.209 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-15a902e7@ff71f2fc==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.210 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-15a902e7=org.apache.spark.ui.JettyUtils$$anon$3-15a902e7@ff71f2fc==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.210 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@5118388b
17:13:59.210 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4404ms org.spark_project.jetty.servlet.ServletHandler@5118388b
17:13:59.225 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-15a902e7@ff71f2fc==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.225 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4419ms org.apache.spark.ui.JettyUtils$$anon$3-15a902e7@ff71f2fc==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.225 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@72efb5c1 for org.apache.spark.ui.JettyUtils$$anon$3-15a902e7
17:13:59.225 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}
17:13:59.225 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4420ms o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}
17:13:59.225 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4420ms org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3
17:13:59.225 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999
17:13:59.225 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999
17:13:59.225 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,null}
17:13:59.226 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,STARTING}
17:13:59.226 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b
17:13:59.226 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-5af28b27 from default=false
17:13:59.226 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.226 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.226 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.226 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-5af28b27@beb2bd33==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.226 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-5af28b27=org.apache.spark.ui.JettyUtils$$anon$3-5af28b27@beb2bd33==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.226 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b
17:13:59.226 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4421ms org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b
17:13:59.226 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-5af28b27@beb2bd33==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.226 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4421ms org.apache.spark.ui.JettyUtils$$anon$3-5af28b27@beb2bd33==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.226 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@41200e0c for org.apache.spark.ui.JettyUtils$$anon$3-5af28b27
17:13:59.227 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}
17:13:59.227 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4421ms o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}
17:13:59.227 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4421ms org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999
17:13:59.227 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf
17:13:59.227 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf
17:13:59.235 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,null}
17:13:59.235 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,STARTING}
17:13:59.235 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@72f46e16
17:13:59.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc from default=false
17:13:59.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.239 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc@be32e55==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.241 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc=org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc@be32e55==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.241 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@72f46e16
17:13:59.241 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4436ms org.spark_project.jetty.servlet.ServletHandler@72f46e16
17:13:59.242 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc@be32e55==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.242 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4436ms org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc@be32e55==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.242 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@4fbdc0f0 for org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc
17:13:59.242 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}
17:13:59.242 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4437ms o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}
17:13:59.242 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4437ms org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf
17:13:59.242 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e
17:13:59.242 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e
17:13:59.242 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,null}
17:13:59.242 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,STARTING}
17:13:59.242 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@549621f3
17:13:59.243 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-54361a9 from default=false
17:13:59.243 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.243 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.243 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.243 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-54361a9@2a2ca7fb==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.243 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-54361a9=org.apache.spark.ui.JettyUtils$$anon$3-54361a9@2a2ca7fb==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.243 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@549621f3
17:13:59.243 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4437ms org.spark_project.jetty.servlet.ServletHandler@549621f3
17:13:59.243 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-54361a9@2a2ca7fb==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.243 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4438ms org.apache.spark.ui.JettyUtils$$anon$3-54361a9@2a2ca7fb==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.243 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@6bc28a83 for org.apache.spark.ui.JettyUtils$$anon$3-54361a9
17:13:59.243 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}
17:13:59.243 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4438ms o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}
17:13:59.243 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4438ms org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e
17:13:59.243 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a
17:13:59.243 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a
17:13:59.246 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,null}
17:13:59.247 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,STARTING}
17:13:59.247 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@2416a51
17:13:59.247 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba from default=false
17:13:59.247 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.247 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.247 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.247 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba@26b4dc66==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.255 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba=org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba@26b4dc66==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.255 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@2416a51
17:13:59.255 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4450ms org.spark_project.jetty.servlet.ServletHandler@2416a51
17:13:59.256 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba@26b4dc66==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.256 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4450ms org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba@26b4dc66==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.256 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@13579834 for org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba
17:13:59.256 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}
17:13:59.256 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4450ms o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}
17:13:59.256 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4450ms org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a
17:13:59.256 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546
17:13:59.256 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546
17:13:59.256 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,null}
17:13:59.256 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,STARTING}
17:13:59.256 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@72e34f77
17:13:59.256 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098 from default=false
17:13:59.256 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.256 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.256 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.256 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098@c3d05377==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.256 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098=org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098@c3d05377==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.256 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@72e34f77
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4451ms org.spark_project.jetty.servlet.ServletHandler@72e34f77
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098@c3d05377==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4451ms org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098@c3d05377==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.257 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@5bd73d1a for org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098
17:13:59.257 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4451ms o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4451ms org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920
17:13:59.257 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@33617539{/executors,null,null}
17:13:59.257 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@33617539{/executors,null,STARTING}
17:13:59.257 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@2c177f9e
17:13:59.257 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-5db4c359 from default=false
17:13:59.257 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.257 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.257 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.257 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-5db4c359@56c11761==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.258 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-5db4c359=org.apache.spark.ui.JettyUtils$$anon$3-5db4c359@56c11761==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.258 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@2c177f9e
17:13:59.261 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4456ms org.spark_project.jetty.servlet.ServletHandler@2c177f9e
17:13:59.262 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-5db4c359@56c11761==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.262 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4457ms org.apache.spark.ui.JettyUtils$$anon$3-5db4c359@56c11761==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.262 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@2555fff0 for org.apache.spark.ui.JettyUtils$$anon$3-5db4c359
17:13:59.262 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}
17:13:59.262 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4457ms o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}
17:13:59.263 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4457ms org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920
17:13:59.263 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a
17:13:59.263 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a
17:13:59.263 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,null}
17:13:59.263 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,STARTING}
17:13:59.263 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@18e7143f
17:13:59.263 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-f9b7332 from default=false
17:13:59.263 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.263 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.263 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.263 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-f9b7332@555d017e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.263 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-f9b7332=org.apache.spark.ui.JettyUtils$$anon$3-f9b7332@555d017e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.263 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@18e7143f
17:13:59.263 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4458ms org.spark_project.jetty.servlet.ServletHandler@18e7143f
17:13:59.264 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-f9b7332@555d017e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.264 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4458ms org.apache.spark.ui.JettyUtils$$anon$3-f9b7332@555d017e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.264 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@120f38e6 for org.apache.spark.ui.JettyUtils$$anon$3-f9b7332
17:13:59.264 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}
17:13:59.264 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4458ms o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}
17:13:59.264 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4458ms org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a
17:13:59.268 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a
17:13:59.268 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a
17:13:59.268 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,null}
17:13:59.268 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,STARTING}
17:13:59.268 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@4f8969b0
17:13:59.268 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190 from default=false
17:13:59.268 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.268 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.268 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.269 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190@5386a2cf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.269 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190=org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190@5386a2cf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.269 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@4f8969b0
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4463ms org.spark_project.jetty.servlet.ServletHandler@4f8969b0
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190@5386a2cf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4463ms org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190@5386a2cf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.269 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@702ed190 for org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190
17:13:59.269 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4464ms o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4464ms org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff
17:13:59.269 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,null}
17:13:59.269 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,STARTING}
17:13:59.269 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@8a589a2
17:13:59.270 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef from default=false
17:13:59.270 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.270 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.270 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.270 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef@af24c98b==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.270 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef=org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef@af24c98b==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:13:59.270 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@8a589a2
17:13:59.270 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4464ms org.spark_project.jetty.servlet.ServletHandler@8a589a2
17:13:59.270 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef@af24c98b==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.270 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4465ms org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef@af24c98b==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:13:59.270 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@7c18432b for org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef
17:13:59.270 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}
17:13:59.270 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4465ms o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}
17:13:59.270 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4465ms org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff
17:13:59.270 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2
17:13:59.270 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2
17:13:59.271 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@b672aa8{/static,null,null}
17:13:59.271 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@b672aa8{/static,null,STARTING}
17:13:59.271 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@2fab4aff
17:13:59.271 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.spark_project.jetty.servlet.DefaultServlet-a77614d from default=false
17:13:59.271 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.271 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.271 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.271 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.spark_project.jetty.servlet.DefaultServlet-a77614d@22ce533d==org.spark_project.jetty.servlet.DefaultServlet,-1,true}
17:13:59.271 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.spark_project.jetty.servlet.DefaultServlet-a77614d=org.spark_project.jetty.servlet.DefaultServlet-a77614d@22ce533d==org.spark_project.jetty.servlet.DefaultServlet,-1,true}
17:13:59.271 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@2fab4aff
17:13:59.271 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4466ms org.spark_project.jetty.servlet.ServletHandler@2fab4aff
17:13:59.271 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.DefaultServlet-a77614d@22ce533d==org.spark_project.jetty.servlet.DefaultServlet,-1,true
17:13:59.271 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4466ms org.spark_project.jetty.servlet.DefaultServlet-a77614d@22ce533d==org.spark_project.jetty.servlet.DefaultServlet,-1,true
17:13:59.272 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.spark_project.jetty.servlet.DefaultServlet@70e29e14 for org.spark_project.jetty.servlet.DefaultServlet-a77614d
17:13:59.297 [main] DEBUG o.s.jetty.servlet.DefaultServlet - resource base = jar:file:/home/aims/spark/jars/spark-core_2.11-2.1.0.jar!/org/apache/spark/ui/static
17:13:59.297 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}
17:13:59.297 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4492ms o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}
17:13:59.297 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4492ms org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2
17:13:59.297 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d
17:13:59.297 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d
17:13:59.302 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@a1217f9{/,null,null}
17:13:59.302 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@a1217f9{/,null,STARTING}
17:13:59.302 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@3bde62ff
17:13:59.302 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$4-523424b5 from default=false
17:13:59.302 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.302 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.302 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.302 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$4-523424b5@a5c8f39e==org.apache.spark.ui.JettyUtils$$anon$4,-1,true}
17:13:59.303 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$4-523424b5=org.apache.spark.ui.JettyUtils$$anon$4-523424b5@a5c8f39e==org.apache.spark.ui.JettyUtils$$anon$4,-1,true}
17:13:59.303 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@3bde62ff
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4497ms org.spark_project.jetty.servlet.ServletHandler@3bde62ff
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$4-523424b5@a5c8f39e==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4497ms org.apache.spark.ui.JettyUtils$$anon$4-523424b5@a5c8f39e==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:13:59.303 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$4@5af9926a for org.apache.spark.ui.JettyUtils$$anon$4-523424b5
17:13:59.303 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4498ms o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4498ms org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1
17:13:59.303 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@791cbf87{/api,null,null}
17:13:59.303 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@791cbf87{/api,null,STARTING}
17:13:59.303 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@a7e2d9d
17:13:59.303 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/* mapped to servlet=org.glassfish.jersey.servlet.ServletContainer-7de0c6ae from default=false
17:13:59.304 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.304 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.304 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.304 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/*=org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false}
17:13:59.304 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.glassfish.jersey.servlet.ServletContainer-7de0c6ae=org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false}
17:13:59.304 [main] DEBUG o.s.jetty.servlet.ServletHandler - Adding Default404Servlet to org.spark_project.jetty.servlet.ServletHandler@a7e2d9d
17:13:59.305 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@a7e2d9d added {org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009@a2b97056==org.spark_project.jetty.servlet.ServletHandler$Default404Servlet,-1,false,AUTO}
17:13:59.306 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@a7e2d9d added {[/]=>org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009,POJO}
17:13:59.306 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/* mapped to servlet=org.glassfish.jersey.servlet.ServletContainer-7de0c6ae from default=false
17:13:59.306 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009 from default=false
17:13:59.306 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.306 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.306 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.308 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/*=org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false, /=org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009@a2b97056==org.spark_project.jetty.servlet.ServletHandler$Default404Servlet,-1,false}
17:13:59.308 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.glassfish.jersey.servlet.ServletContainer-7de0c6ae=org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false, org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009=org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009@a2b97056==org.spark_project.jetty.servlet.ServletHandler$Default404Servlet,-1,false}
17:13:59.308 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@a7e2d9d
17:13:59.308 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4503ms org.spark_project.jetty.servlet.ServletHandler@a7e2d9d
17:13:59.308 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false
17:13:59.308 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4503ms org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false
17:13:59.309 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009@a2b97056==org.spark_project.jetty.servlet.ServletHandler$Default404Servlet,-1,false
17:13:59.309 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4503ms org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009@a2b97056==org.spark_project.jetty.servlet.ServletHandler$Default404Servlet,-1,false
17:13:59.309 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}
17:13:59.309 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4503ms o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}
17:13:59.309 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4503ms org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1
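Unlike the page contexts, the /api context just started registers a Jersey ServletContainer at /* (plus a Default404Servlet fallback), which is what serves Spark's documented REST monitoring API under /api/v1. Assuming the default local UI address, a throwaway probe like the following (class name and host:port are assumptions) would list the running applications once startup completes:

// Hedged usage sketch: query the REST API backed by the Jersey container above.
// The /api/v1/applications endpoint is from Spark's monitoring docs; localhost:4040
// assumes the default local UI binding.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class RestApiProbe {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:4040/api/v1/applications");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);   // JSON array describing the application
            }
        }
    }
}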
17:13:59.309 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a
17:13:59.309 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a
17:13:59.310 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,null}
17:13:59.310 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,STARTING}
17:13:59.310 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@21526f6c
17:13:59.310 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$4-49f5c307 from default=false
17:13:59.310 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.310 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.310 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.311 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$4-49f5c307@75ee116==org.apache.spark.ui.JettyUtils$$anon$4,-1,true}
17:13:59.311 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$4-49f5c307=org.apache.spark.ui.JettyUtils$$anon$4-49f5c307@75ee116==org.apache.spark.ui.JettyUtils$$anon$4,-1,true}
17:13:59.311 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@21526f6c
17:13:59.311 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4506ms org.spark_project.jetty.servlet.ServletHandler@21526f6c
17:13:59.311 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$4-49f5c307@75ee116==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:13:59.312 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4506ms org.apache.spark.ui.JettyUtils$$anon$4-49f5c307@75ee116==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:13:59.312 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$4@797501a for org.apache.spark.ui.JettyUtils$$anon$4-49f5c307
17:13:59.312 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}
17:13:59.312 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4506ms o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}
17:13:59.312 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4506ms org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a
17:13:59.312 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b
17:13:59.312 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b
17:13:59.312 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,null}
17:13:59.312 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,STARTING}
17:13:59.312 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@1601e47
17:13:59.312 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$4-3bffddff from default=false
17:13:59.312 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:13:59.312 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:13:59.312 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:13:59.313 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$4-3bffddff@1b10e45a==org.apache.spark.ui.JettyUtils$$anon$4,-1,true}
17:13:59.313 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$4-3bffddff=org.apache.spark.ui.JettyUtils$$anon$4-3bffddff@1b10e45a==org.apache.spark.ui.JettyUtils$$anon$4,-1,true}
17:13:59.313 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@1601e47
17:13:59.313 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4508ms org.spark_project.jetty.servlet.ServletHandler@1601e47
17:13:59.313 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$4-3bffddff@1b10e45a==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:13:59.313 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4508ms org.apache.spark.ui.JettyUtils$$anon$4-3bffddff@1b10e45a==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:13:59.313 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$4@57f791c6 for org.apache.spark.ui.JettyUtils$$anon$4-3bffddff
17:13:59.313 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}
17:13:59.314 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4508ms o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}
17:13:59.320 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4514ms org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b
17:13:59.327 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4522ms org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b]
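The two contexts registered in this stretch, /jobs/job/kill and /stages/stage/kill, are only mounted because kill links are enabled in the UI; spark.ui.killEnabled (true by default) governs them. A sketch of turning them off, assuming the driver builds its own SparkConf (the app name is an assumption taken from the jar on the classpath):

    import org.apache.spark.SparkConf;

    public class KillSwitchSketch {
        public static void main(String[] args) {
            // With killEnabled=false the two .../kill handlers above are never mounted.
            SparkConf conf = new SparkConf()
                    .setAppName("SentimentAnalysis")   // assumed, not confirmed by the log
                    .setMaster("local[*]")
                    .set("spark.ui.killEnabled", "false");
            System.out.println(conf.get("spark.ui.killEnabled"));
        }
    }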
17:13:59.328 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040}
17:13:59.357 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040} added {sun.nio.ch.ServerSocketChannelImpl[/0:0:0:0:0:0:0:0:4040],POJO}
17:13:59.357 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.util.thread.ScheduledExecutorScheduler@655a5d9c
17:13:59.358 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4553ms org.spark_project.jetty.util.thread.ScheduledExecutorScheduler@655a5d9c
17:13:59.358 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting HttpConnectionFactory@5910de75{HTTP/1.1}
17:13:59.359 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4553ms HttpConnectionFactory@5910de75{HTTP/1.1}
17:13:59.359 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.server.ServerConnector$ServerConnectorManager@1df98368
17:13:59.377 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=-1 selected=-1
17:13:59.378 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4572ms org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=0 selected=0
17:13:59.379 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=-1 selected=-1
17:13:59.379 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4573ms org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=0 selected=0
17:13:59.379 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4574ms org.spark_project.jetty.server.ServerConnector$ServerConnectorManager@1df98368
17:13:59.380 [SparkUI-36-selector-ServerConnectorManager@1df98368/0] DEBUG o.s.jetty.io.SelectorManager - Starting Thread[SparkUI-36-selector-ServerConnectorManager@1df98368/0,5,main] on org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=0 selected=0
17:13:59.381 [SparkUI-36-selector-ServerConnectorManager@1df98368/0] DEBUG o.s.jetty.io.SelectorManager - Selector loop waiting on select
17:13:59.381 [SparkUI-37-selector-ServerConnectorManager@1df98368/1] DEBUG o.s.jetty.io.SelectorManager - Starting Thread[SparkUI-37-selector-ServerConnectorManager@1df98368/1,5,main] on org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=0 selected=0
17:13:59.381 [SparkUI-37-selector-ServerConnectorManager@1df98368/1] DEBUG o.s.jetty.io.SelectorManager - Selector loop waiting on select
17:13:59.383 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040} added {acceptor-0@7957dc72,POJO}
17:13:59.384 [main] INFO o.s.jetty.server.ServerConnector - Started ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040}
17:13:59.386 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4579ms ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040}
17:13:59.386 [main] INFO o.spark_project.jetty.server.Server - Started @4580ms
17:13:59.386 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @4580ms org.spark_project.jetty.server.Server@514eedd8
17:13:59.386 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
17:13:59.414 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://10.0.0.3:4040
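Everything from the first ServletContextHandler down to this line is the side effect of constructing a single SparkContext: it boots the embedded Jetty server, mounts the UI handlers, and binds to 0.0.0.0:4040 (spark.ui.port; if the port is taken, Spark retries 4041, 4042, ...). A minimal driver that reproduces this startup, as a sketch assuming local mode:

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public class UiBootSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf()
                    .setAppName("SentimentAnalysis")   // assumed from the jar on the classpath
                    .setMaster("local[*]")
                    .set("spark.ui.port", "4040");     // 4040 is the default; set only for clarity
            // Constructing the context starts the 'SparkUI' service logged above.
            JavaSparkContext sc = new JavaSparkContext(conf);
            System.out.println("UI: " + sc.sc().uiWebUrl());  // Some(http://10.0.0.3:4040)
            sc.stop();
        }
    }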
17:13:59.785 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
17:13:59.827 [main] DEBUG o.a.s.network.server.TransportServer - Shuffle server started on port: 44157
17:13:59.827 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 44157.
17:13:59.828 [main] INFO o.a.s.n.n.NettyBlockTransferService - Server created on 10.0.0.3:44157
17:13:59.831 [main] INFO o.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
17:13:59.834 [main] INFO o.a.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 10.0.0.3, 44157, None)
17:13:59.837 [dispatcher-event-loop-2] DEBUG o.a.s.storage.DefaultTopologyMapper - Got a request for 10.0.0.3
17:13:59.839 [dispatcher-event-loop-2] INFO o.a.s.s.BlockManagerMasterEndpoint - Registering block manager 10.0.0.3:44157 with 335.4 MB RAM, BlockManagerId(driver, 10.0.0.3, 44157, None)
17:13:59.855 [main] INFO o.a.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 10.0.0.3, 44157, None)
17:13:59.856 [main] INFO o.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 10.0.0.3, 44157, None)
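The driver now also runs an in-process executor; the NettyBlockTransferService picked the ephemeral port 44157 (pin it with spark.blockManager.port if needed), and the BlockManager registered with 335.4 MB of unified storage/execution memory. That figure follows Spark 2.x's UnifiedMemoryManager sizing; a back-of-envelope sketch assuming the documented defaults (300 MB reserved system memory, spark.memory.fraction = 0.6):

    public class UnifiedMemorySketch {
        public static void main(String[] args) {
            double reservedMb = 300.0;   // reserved system memory in Spark 2.x
            double fraction = 0.6;       // spark.memory.fraction default
            // Solving 335.4 = (heap - 300) * 0.6 backwards gives the heap Spark saw:
            double heapMb = 335.4 / fraction + reservedMb;          // ~859 MB
            double unifiedMb = (heapMb - reservedMb) * fraction;    // ~335.4 MB, as logged
            System.out.printf("heap ~%.1f MB -> unified region ~%.1f MB%n",
                    heapMb, unifiedMb);
        }
    }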
17:14:00.115 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@779de014{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@5c41d037,AUTO}
17:14:00.115 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5c41d037 added {org.apache.spark.ui.JettyUtils$$anon$3-2234078@880f5364==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:14:00.115 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5c41d037 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-2234078,POJO}
17:14:00.122 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,null}] added {o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,null},UNMANAGED}
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - ->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,[o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,[o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,[o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,[o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - api->[{org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,[o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,[o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool->[{org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,[o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,[o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static->[{org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,[o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,[o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}]}]
17:14:00.127 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,[o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,[o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,[o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,[o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs->[{org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,[o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,[o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,[o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,[o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,[o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,[o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment->[{org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,[o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages->[{org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,[o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,[o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}]}]
17:14:00.128 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,[o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}]}]
17:14:00.129 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - metrics/json->[{o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,null},[o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,null}]}]
17:14:00.129 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump->[{org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,[o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}]}]
17:14:00.129 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,null}
17:14:00.129 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,STARTING}
17:14:00.129 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@5c41d037
17:14:00.129 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-2234078 from default=false
17:14:00.129 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:14:00.129 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:14:00.129 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:14:00.129 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-2234078@880f5364==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.129 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-2234078=org.apache.spark.ui.JettyUtils$$anon$3-2234078@880f5364==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.129 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@5c41d037
17:14:00.129 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5324ms org.spark_project.jetty.servlet.ServletHandler@5c41d037
17:14:00.130 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-2234078@880f5364==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.130 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5324ms org.apache.spark.ui.JettyUtils$$anon$3-2234078@880f5364==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.130 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@6719a5b8 for org.apache.spark.ui.JettyUtils$$anon$3-2234078
17:14:00.130 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}
17:14:00.130 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5324ms o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}
17:14:00.179 [main] DEBUG org.apache.spark.SparkContext - Adding shutdown hook
17:14:00.198 [main] INFO o.a.spark.sql.internal.SharedState - Warehouse path is 'file:/home/aims/workspace/sparkhive/spark-warehouse'.
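The SharedState line marks the first SparkSession coming up: the warehouse defaults to a spark-warehouse directory under the working directory (here /home/aims/workspace/sparkhive) and can be pinned explicitly with spark.sql.warehouse.dir. A sketch that reuses the path from the log:

    import org.apache.spark.sql.SparkSession;

    public class SessionSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder()
                    .appName("SentimentAnalysis")   // assumed from the application jar
                    .master("local[*]")
                    .config("spark.sql.warehouse.dir",
                            "file:/home/aims/workspace/sparkhive/spark-warehouse")
                    .getOrCreate();
            System.out.println(spark.conf().get("spark.sql.warehouse.dir"));
            spark.stop();
        }
    }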
17:14:00.212 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@c9d82f9{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@6f012914,AUTO}
17:14:00.213 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@6f012914 added {org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf@a9219c08==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:14:00.213 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@6f012914 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf,POJO}
17:14:00.213 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@d02f8d{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@60baef24,AUTO}
17:14:00.213 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@60baef24 added {org.apache.spark.ui.JettyUtils$$anon$3-61533ae@5a0f32f2==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:14:00.213 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@60baef24 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-61533ae,POJO}
17:14:00.214 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,null}] added {o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,null},UNMANAGED}
17:14:00.214 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - ->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,[o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}]}]
17:14:00.214 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,[o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}]}]
17:14:00.214 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,[o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,[o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - api->[{org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,[o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,[o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool->[{org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,[o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,[o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static->[{org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,[o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,[o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,[o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,[o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,[o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,[o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs->[{org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,[o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}]}]
17:14:00.215 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,[o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,[o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,[o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL->[{o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,null},[o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,null}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,[o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,[o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment->[{org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,[o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages->[{org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,[o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,[o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}]}]
17:14:00.216 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,[o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}]}]
17:14:00.217 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - metrics/json->[{o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}]}]
17:14:00.217 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump->[{org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,[o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}]}]
17:14:00.217 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,null}
17:14:00.217 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,STARTING}
17:14:00.217 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@6f012914
17:14:00.217 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf from default=false
17:14:00.217 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:14:00.217 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:14:00.217 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:14:00.217 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf@a9219c08==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.217 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf=org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf@a9219c08==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.217 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@6f012914
17:14:00.217 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5412ms org.spark_project.jetty.servlet.ServletHandler@6f012914
17:14:00.217 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf@a9219c08==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.218 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5412ms org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf@a9219c08==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.218 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@720653c2 for org.apache.spark.ui.JettyUtils$$anon$3-18fdb6cf
17:14:00.218 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}
17:14:00.218 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5412ms o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}
17:14:00.218 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}, o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,null}] added {o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,null},UNMANAGED}
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - ->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,[o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,[o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,[o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,[o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - api->[{org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,[o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,[o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool->[{org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,[o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,[o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}]}]
17:14:00.219 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static->[{org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,[o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,[o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,[o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,[o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,[o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,[o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs->[{org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,[o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,[o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,[o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}]}]
17:14:00.220 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,[o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL->[{o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE},[o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,[o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,[o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment->[{org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,[o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages->[{org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,[o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,[o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/json->[{o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,null},[o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,null}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,[o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - metrics/json->[{o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump->[{org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,[o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}]}]
17:14:00.221 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,null}
17:14:00.221 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,STARTING}
17:14:00.221 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@60baef24
17:14:00.222 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-61533ae from default=false
17:14:00.222 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:14:00.222 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:14:00.222 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:14:00.222 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-61533ae@5a0f32f2==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.222 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-61533ae=org.apache.spark.ui.JettyUtils$$anon$3-61533ae@5a0f32f2==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.222 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@60baef24
17:14:00.222 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5417ms org.spark_project.jetty.servlet.ServletHandler@60baef24
17:14:00.222 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-61533ae@5a0f32f2==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.222 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5417ms org.apache.spark.ui.JettyUtils$$anon$3-61533ae@5a0f32f2==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.222 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@6ad5923a for org.apache.spark.ui.JettyUtils$$anon$3-61533ae
17:14:00.222 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}
17:14:00.223 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5417ms o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}
17:14:00.223 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@43b0ade{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@5395ea39,AUTO}
17:14:00.223 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5395ea39 added {org.apache.spark.ui.JettyUtils$$anon$3-1517f633@ad85bb26==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:14:00.223 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@5395ea39 added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-1517f633,POJO}
17:14:00.223 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@4fe01803{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@13d186db,AUTO}
17:14:00.223 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@13d186db added {org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba@dd8b821a==org.apache.spark.ui.JettyUtils$$anon$3,-1,true,AUTO}
17:14:00.224 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@13d186db added {[/]=>org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba,POJO}
17:14:00.224 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}, o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,null}] added {o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,null},UNMANAGED}
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - ->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,[o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,[o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,[o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,[o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - api->[{org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,[o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,[o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool->[{org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,[o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,[o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static->[{org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,[o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,[o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,[o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}]}]
17:14:00.225 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,[o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,[o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,[o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs->[{org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,[o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,[o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,[o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,[o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL->[{o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE},[o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,[o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,[o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment->[{org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,[o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages->[{org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,[o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,[o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}]}]
17:14:00.226 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/json->[{o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}]}]
17:14:00.227 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,[o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}]}]
17:14:00.227 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - metrics/json->[{o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}]}]
17:14:00.227 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/execution->[{o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,null},[o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,null}]}]
17:14:00.227 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump->[{org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,[o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}]}]
17:14:00.227 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,null}
17:14:00.227 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,STARTING}
17:14:00.227 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@5395ea39
17:14:00.227 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-1517f633 from default=false
17:14:00.227 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:14:00.227 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:14:00.227 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:14:00.228 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-1517f633@ad85bb26==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.228 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-1517f633=org.apache.spark.ui.JettyUtils$$anon$3-1517f633@ad85bb26==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.228 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@5395ea39
17:14:00.228 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5422ms org.spark_project.jetty.servlet.ServletHandler@5395ea39
17:14:00.228 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-1517f633@ad85bb26==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.228 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5422ms org.apache.spark.ui.JettyUtils$$anon$3-1517f633@ad85bb26==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.228 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@a0a9fa5 for org.apache.spark.ui.JettyUtils$$anon$3-1517f633
17:14:00.228 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE}
17:14:00.228 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5423ms o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE}
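With /SQL, /SQL/json, /SQL/execution and (next, below) /SQL/execution/json attached to the running server, the SQL tab of the UI is live; any Dataset action from here on shows up as an execution under /SQL/execution. A sketch of one such action — the session setup mirrors the earlier sketches and is an assumption, not the application's actual code:

    import org.apache.spark.sql.SparkSession;

    public class SqlTabSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder()
                    .appName("SentimentAnalysis").master("local[*]").getOrCreate();
            // The count() action registers a SQL execution visible under
            // http://10.0.0.3:4040/SQL/execution while the app runs.
            long n = spark.range(100).count();
            System.out.println(n);
            spark.stop();
        }
    }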
17:14:00.228 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}, o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE}, o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,null}] added {o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,null},UNMANAGED}
17:14:00.229 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - ->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,[o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}]}]
17:14:00.229 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,[o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}]}]
17:14:00.229 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,[o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}]}]
17:14:00.229 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,[o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}]}]
17:14:00.229 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/execution/json->[{o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,null},[o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,null}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - api->[{org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,[o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,[o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool->[{org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,[o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,[o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static->[{org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,[o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,[o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,[o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,[o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,[o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}]}]
17:14:00.230 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,[o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs->[{org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,[o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,[o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,[o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,[o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL->[{o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE},[o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,[o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,[o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment->[{org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,[o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages->[{org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,[o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,[o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/json->[{o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}]}]
17:14:00.231 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,[o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}]}]
17:14:00.232 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - metrics/json->[{o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}]}]
17:14:00.232 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/execution->[{o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE},[o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE}]}]
17:14:00.232 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump->[{org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,[o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}]}]
17:14:00.232 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,null}
17:14:00.232 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,STARTING}
17:14:00.232 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@13d186db
17:14:00.232 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba from default=false
17:14:00.232 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:14:00.232 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:14:00.232 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:14:00.232 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba@dd8b821a==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.232 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba=org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba@dd8b821a==org.apache.spark.ui.JettyUtils$$anon$3,-1,true}
17:14:00.232 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@13d186db
17:14:00.232 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5427ms org.spark_project.jetty.servlet.ServletHandler@13d186db
17:14:00.233 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba@dd8b821a==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.233 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5427ms org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba@dd8b821a==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:00.233 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.apache.spark.ui.JettyUtils$$anon$3@312afbc7 for org.apache.spark.ui.JettyUtils$$anon$3-6f6962ba
17:14:00.233 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,AVAILABLE}
17:14:00.233 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5427ms o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,AVAILABLE}
17:14:00.234 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - o.s.j.s.ServletContextHandler@6569dded{/,null,null} added {org.spark_project.jetty.servlet.ServletHandler@466d49f0,AUTO}
17:14:00.235 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@466d49f0 added {org.spark_project.jetty.servlet.DefaultServlet-710d7aff@e53f04e7==org.spark_project.jetty.servlet.DefaultServlet,-1,true,AUTO}
17:14:00.235 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.servlet.ServletHandler@466d49f0 added {[/]=>org.spark_project.jetty.servlet.DefaultServlet-710d7aff,POJO}
17:14:00.235 [main] DEBUG o.s.j.u.component.ContainerLifeCycle - org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}, o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE}, o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,AVAILABLE}, o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,null}] added {o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,null},UNMANAGED}
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - ->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d,[o.s.j.s.ServletContextHandler@a1217f9{/,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf,[o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3,[o.s.j.s.ServletContextHandler@664a9613{/storage,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/rdd/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e,[o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/execution/json->[{o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - api->[{org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1,[o.s.j.s.ServletContextHandler@791cbf87{/api,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7,[o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/pool->[{org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2,[o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148,[o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static->[{org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2,[o.s.j.s.ServletContextHandler@b672aa8{/static,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a,[o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39,[o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,AVAILABLE}]}]
17:14:00.236 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff,[o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546,[o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c,[o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs->[{org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e,[o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b,[o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage->[{org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b,[o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - storage/json->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999,[o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL->[{o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE},[o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - static/sql->[{o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,null},[o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,null}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages/stage/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b,[o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job->[{org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4,[o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - environment->[{org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a,[o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - stages->[{org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b,[o.s.j.s.ServletContextHandler@593e824f{/stages,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors->[{org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920,[o.s.j.s.ServletContextHandler@33617539{/executors,null,AVAILABLE}]}]
17:14:00.237 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/json->[{o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,AVAILABLE}]}]
17:14:00.238 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - jobs/job/kill->[{org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a,[o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,AVAILABLE}]}]
17:14:00.238 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - metrics/json->[{o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE},[o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,AVAILABLE}]}]
17:14:00.238 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - SQL/execution->[{o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE},[o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,AVAILABLE}]}]
17:14:00.238 [main] DEBUG o.s.j.s.h.ContextHandlerCollection - executors/threadDump->[{org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a,[o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,AVAILABLE}]}]
17:14:00.238 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,null}
17:14:00.238 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,STARTING}
17:14:00.238 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.ServletHandler@466d49f0
17:14:00.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - Chose path=/ mapped to servlet=org.spark_project.jetty.servlet.DefaultServlet-710d7aff from default=false
17:14:00.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - filterNameMap={}
17:14:00.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - pathFilters=null
17:14:00.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletFilterMap=null
17:14:00.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletPathMap={/=org.spark_project.jetty.servlet.DefaultServlet-710d7aff@e53f04e7==org.spark_project.jetty.servlet.DefaultServlet,-1,true}
17:14:00.238 [main] DEBUG o.s.jetty.servlet.ServletHandler - servletNameMap={org.spark_project.jetty.servlet.DefaultServlet-710d7aff=org.spark_project.jetty.servlet.DefaultServlet-710d7aff@e53f04e7==org.spark_project.jetty.servlet.DefaultServlet,-1,true}
17:14:00.239 [main] DEBUG o.s.j.server.handler.AbstractHandler - starting org.spark_project.jetty.servlet.ServletHandler@466d49f0
17:14:00.239 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5433ms org.spark_project.jetty.servlet.ServletHandler@466d49f0
17:14:00.239 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - starting org.spark_project.jetty.servlet.DefaultServlet-710d7aff@e53f04e7==org.spark_project.jetty.servlet.DefaultServlet,-1,true
17:14:00.239 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5433ms org.spark_project.jetty.servlet.DefaultServlet-710d7aff@e53f04e7==org.spark_project.jetty.servlet.DefaultServlet,-1,true
17:14:00.239 [main] DEBUG o.s.jetty.servlet.ServletHolder - Servlet.init org.spark_project.jetty.servlet.DefaultServlet@65327f5 for org.spark_project.jetty.servlet.DefaultServlet-710d7aff
17:14:00.239 [main] DEBUG o.s.jetty.servlet.DefaultServlet - resource base = jar:file:/home/aims/spark/jars/spark-sql_2.11-2.1.0.jar!/org/apache/spark/sql/execution/ui/static
17:14:00.239 [main] INFO o.s.j.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,AVAILABLE}
17:14:00.239 [main] DEBUG o.s.j.u.component.AbstractLifeCycle - STARTED @5434ms o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,AVAILABLE}
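(Note — not part of the log: the block above shows Jetty finishing the Spark UI wiring. Each UI endpoint (/jobs, /stages, /SQL, /SQL/execution, /static/sql, ...) is its own ServletContextHandler, most of them wrapped in a GzipHandler, and all of them registered in a single ContextHandlerCollection; the repeated "path->[handler]" DEBUG lines are that collection rebuilding its path map after every addition. Below is a minimal sketch of the same pattern against stock Jetty — Spark 2.1 actually uses a shaded copy under org.spark_project.jetty, and the port, class name, and servlet here are illustrative, not taken from this log.)

import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;

public class UiWiringSketch {
    public static void main(String[] args) throws Exception {
        Server server = new Server(4040);                        // Spark's default UI port
        ContextHandlerCollection contexts = new ContextHandlerCollection();

        // One context per UI path, as in the log above.
        ServletContextHandler sqlExecution = new ServletContextHandler();
        sqlExecution.setContextPath("/SQL/execution");
        sqlExecution.addServlet(new ServletHolder(new DefaultServlet()), "/");
        contexts.addHandler(sqlExecution);                       // produces an "... added {...}" DEBUG line

        server.setHandler(contexts);
        server.start();                                          // emits the STARTED lifecycle events seen above
        server.join();
    }
}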
17:14:00.447 [main] INFO org.apache.spark.sql.hive.HiveUtils - Initializing HiveMetastoreConnection version 1.2.1 using Spark classes.
17:14:00.459 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - Initializing the logger to avoid disaster...
17:14:00.479 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl - -1216048068
17:14:00.483 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.HiveClient
17:14:00.483 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.internal.Logging
17:14:00.483 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Object
17:14:00.484 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Function0
17:14:00.484 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.Seq
17:14:00.484 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Throwable
17:14:00.488 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.NonLocalReturnControl
17:14:00.489 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Exception
17:14:00.489 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.Nothing$
17:14:00.496 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Option
17:14:00.496 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.None$
17:14:00.496 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.parser.ParseException
17:14:00.498 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Function1
17:14:00.498 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.SparkException
17:14:00.498 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.MatchError
17:14:00.498 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.Map
17:14:00.501 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim_v0_12 - -1239488159
17:14:00.503 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim - -2042608167
17:14:00.505 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim_v0_13 - -1844275366
17:14:00.507 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim_v0_14 - -1855246717
17:14:00.510 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim_v1_0 - -648535241
17:14:00.512 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim_v1_1 - 1819942976
17:14:00.513 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim_v1_2 - 1026546884
17:14:00.515 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.OutputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/OutputStream.class
17:14:00.516 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.util.CircularBuffer
17:14:00.517 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.package$HiveVersion
17:14:00.517 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.SparkConf
17:14:00.517 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.conf.Configuration
17:14:00.518 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.immutable.Map
17:14:00.518 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ClassLoader
17:14:00.519 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.IsolatedClientLoader
17:14:00.519 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.HiveClient$class
17:14:00.521 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.internal.Logging$class
17:14:00.522 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.util.CircularBuffer$
17:14:00.523 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.package$hive$v12$
17:14:00.524 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.package$hive$v13$
17:14:00.524 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.package$hive$v14$
17:14:00.524 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.package$hive$v1_0$
17:14:00.525 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.package$hive$v1_1$
17:14:00.526 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.client.package$hive$v1_2$
17:14:00.526 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.UnsupportedOperationException
17:14:00.526 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.AnalysisException
17:14:00.529 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException
17:14:00.531 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.List - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/List.class
17:14:00.532 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Some
17:14:00.532 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.reflect.InvocationTargetException
17:14:00.533 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Collection - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Collection.class
17:14:00.536 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.RuntimeException
17:14:00.537 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.CharSequence
17:14:00.537 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.PartialFunction
17:14:00.578 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf.class
17:14:00.582 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Thread
17:14:00.585 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.SessionState - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/SessionState.class
17:14:00.588 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Map - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Map.class
17:14:00.589 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.HiveException - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/HiveException.class
17:14:00.590 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.FileSystem
17:14:00.591 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.LocalFileSystem
17:14:00.595 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.class
17:14:00.596 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.AssertionError
17:14:00.597 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.IllegalArgumentException
17:14:00.597 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.IOException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/IOException.class
17:14:00.597 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Set - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Set.class
17:14:00.597 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.URLClassLoader
17:14:00.598 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.URISyntaxException
17:14:00.598 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ClassNotFoundException
17:14:00.599 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ThreadLocal
17:14:00.599 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.SessionState$1 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/SessionState$1.class
17:14:00.599 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.history.HiveHistory - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/history/HiveHistory.class
17:14:00.600 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.LogFactory - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/LogFactory.class
17:14:00.603 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.security.PrivilegedAction - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/security/PrivilegedAction.class
17:14:00.603 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.UnsupportedEncodingException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/UnsupportedEncodingException.class
17:14:00.603 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.SecurityException
17:14:00.604 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.String
17:14:00.604 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.Reader - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/Reader.class
17:14:00.604 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.InputStreamReader - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/InputStreamReader.class
17:14:00.605 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.NoClassDefFoundError
17:14:00.605 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.FileOutputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/FileOutputStream.class
17:14:00.605 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Enumeration - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Enumeration.class
17:14:00.606 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.LinkageError
17:14:00.606 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ClassCastException
17:14:00.607 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.LogConfigurationException - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/LogConfigurationException.class
17:14:00.607 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.NoSuchMethodException
17:14:00.607 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.IllegalAccessException
17:14:00.607 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.reflect.Method
17:14:00.608 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Class
17:14:00.608 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.LogFactory$6 - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/LogFactory$6.class
17:14:00.609 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.security.AccessController - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/security/AccessController.class
17:14:00.609 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.System
17:14:00.609 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.impl.WeakHashtable - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/impl/WeakHashtable.class
17:14:00.610 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Hashtable - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Hashtable.class
17:14:00.610 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.NullPointerException
17:14:00.611 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ref.ReferenceQueue
17:14:00.611 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.LogFactory$1 - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/LogFactory$1.class
17:14:00.612 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.impl.WeakHashtable$Referenced - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/impl/WeakHashtable$Referenced.class
17:14:00.612 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ref.Reference
17:14:00.612 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ref.WeakReference
17:14:00.613 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.impl.WeakHashtable$WeakKey - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/impl/WeakHashtable$WeakKey.class
17:14:00.613 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.LogFactory$4 - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/LogFactory$4.class
17:14:00.614 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.NoSuchMethodError
17:14:00.636 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.LogFactory$3 - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/LogFactory$3.class
17:14:00.637 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.InputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/InputStream.class
17:14:00.637 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.BufferedReader - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/BufferedReader.class
17:14:00.638 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.LogFactory$2 - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/LogFactory$2.class
17:14:00.638 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.impl.SLF4JLogFactory - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/impl/SLF4JLogFactory.class
17:14:00.639 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.impl.SLF4JLocationAwareLog - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/impl/SLF4JLocationAwareLog.class
17:14:00.640 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.Log - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/Log.class
17:14:00.640 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.Serializable - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/Serializable.class
17:14:00.640 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.logging.impl.SLF4JLog - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/logging/impl/SLF4JLog.class
17:14:00.641 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.concurrent.ConcurrentMap - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/concurrent/ConcurrentMap.class
17:14:00.641 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.concurrent.ConcurrentHashMap - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/concurrent/ConcurrentHashMap.class
17:14:00.642 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.slf4j.LoggerFactory
17:14:00.642 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.slf4j.spi.LocationAwareLogger
17:14:00.643 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.SessionState$SessionStates - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/SessionState$SessionStates.class
17:14:00.644 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.ByteArrayOutputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/ByteArrayOutputStream.class
17:14:00.644 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.LoopingByteArrayInputStream - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/LoopingByteArrayInputStream.class
17:14:00.645 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: javax.security.auth.login.LoginException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/javax/security/auth/login/LoginException.class
17:14:00.647 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.HashMap - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/HashMap.class
17:14:00.648 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf$ConfVars - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf$ConfVars.class
17:14:00.654 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Enum
17:14:00.656 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator.class
17:14:00.670 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf$ConfVars$VarType - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf$ConfVars$VarType.class
17:14:00.678 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf$ConfVars$VarType$1 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf$ConfVars$VarType$1.class
17:14:00.679 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf$ConfVars$VarType$2 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf$ConfVars$VarType$2.class
17:14:00.682 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf$ConfVars$VarType$3 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf$ConfVars$VarType$3.class
17:14:00.683 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf$ConfVars$VarType$4 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf$ConfVars$VarType$4.class
17:14:00.684 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConf$ConfVars$VarType$5 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConf$ConfVars$VarType$5.class
17:14:00.686 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.SystemVariables - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/SystemVariables.class
17:14:00.687 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.IllegalStateException
17:14:00.688 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.regex.Pattern - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/regex/Pattern.class
17:14:00.688 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.StringBuilder
17:14:00.688 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.regex.Matcher - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/regex/Matcher.class
17:14:00.689 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.File - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/File.class
17:14:00.689 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Boolean
17:14:00.689 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Integer
17:14:00.689 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Long
17:14:00.689 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Float
17:14:00.690 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.util.Shell
17:14:00.702 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.ShimLoader - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/ShimLoader.class
17:14:00.705 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims.class
17:14:00.706 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.util.VersionInfo
17:14:00.707 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.Hadoop23Shims - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/Hadoop23Shims.class
17:14:00.710 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShimsSecure - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShimsSecure.class
17:14:00.712 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$MiniMrShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$MiniMrShim.class
17:14:00.713 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.Hadoop23Shims$MiniMrShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/Hadoop23Shims$MiniMrShim.class
17:14:00.714 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.Hadoop23Shims$MiniTezShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/Hadoop23Shims$MiniTezShim.class
17:14:00.717 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.Hadoop23Shims$MiniSparkShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/Hadoop23Shims$MiniSparkShim.class
17:14:00.719 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$MiniDFSShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$MiniDFSShim.class
17:14:00.720 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$CombineFileInputFormatShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$CombineFileInputFormatShim.class
17:14:00.720 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapreduce.TaskAttemptContext
17:14:00.720 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapreduce.JobContext
17:14:00.720 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Comparator - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Comparator.class
17:14:00.721 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$HdfsFileStatus - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$HdfsFileStatus.class
17:14:00.721 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Iterable
17:14:00.722 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$HCatHadoopShims - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$HCatHadoopShims.class
17:14:00.722 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$WebHCatJTShim.class
17:14:00.724 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.Hadoop23Shims$ProxyFileSystem23 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/Hadoop23Shims$ProxyFileSystem23.class
17:14:00.726 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.ProxyFileSystem
17:14:00.726 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$StoragePolicyShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$StoragePolicyShim.class
17:14:00.729 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$KerberosNameShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$KerberosNameShim.class
17:14:00.731 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.HadoopShims$HdfsEncryptionShim - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/HadoopShims$HdfsEncryptionShim.class
17:14:00.732 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: com.google.common.base.Predicate
17:14:00.735 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.security.AccessControlException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/security/AccessControlException.class
17:14:00.736 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.Path
17:14:00.737 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.permission.FsAction
17:14:00.737 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.CacheFlag
17:14:00.737 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.hdfs.protocol.BlockStoragePolicy
17:14:00.739 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$TimeValidator - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$TimeValidator.class
17:14:00.740 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.concurrent.TimeUnit - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/concurrent/TimeUnit.class
17:14:00.745 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$StringSet - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$StringSet.class
17:14:00.746 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.LinkedHashSet - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/LinkedHashSet.class
17:14:00.755 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$RatioValidator - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$RatioValidator.class
17:14:00.755 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.NumberFormatException
17:14:00.757 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$PatternSet - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$PatternSet.class
17:14:00.761 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.ArrayList - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/ArrayList.class
17:14:00.766 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$RangeValidator - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$RangeValidator.class
17:14:00.767 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$TYPE - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$TYPE.class
17:14:00.768 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$TYPE$1 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$TYPE$1.class
17:14:00.771 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$TYPE$2 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$TYPE$2.class
17:14:00.772 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.Validator$TYPE$3 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/Validator$TYPE$3.class
17:14:00.777 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hive.common.HiveCompat - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hive/common/HiveCompat.class
17:14:00.779 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hive.common.HiveCompat$CompatLevel - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hive/common/HiveCompat$CompatLevel.class
17:14:00.792 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.JobConf
17:14:00.800 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.jaxp.DocumentBuilderFactoryImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/jaxp/DocumentBuilderFactoryImpl.class
17:14:00.806 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: javax.xml.parsers.DocumentBuilderFactory - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/javax/xml/parsers/DocumentBuilderFactory.class
17:14:00.806 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.SAXException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/SAXException.class
17:14:00.806 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: javax.xml.parsers.ParserConfigurationException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/javax/xml/parsers/ParserConfigurationException.class
17:14:00.806 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: javax.xml.parsers.DocumentBuilder - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/javax/xml/parsers/DocumentBuilder.class
17:14:00.807 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.jaxp.DocumentBuilderImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/jaxp/DocumentBuilderImpl.class
17:14:00.808 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.jaxp.JAXPConstants - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/jaxp/JAXPConstants.class
17:14:00.809 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.SAXNotRecognizedException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/SAXNotRecognizedException.class
17:14:00.809 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.SAXNotSupportedException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/SAXNotSupportedException.class
17:14:00.810 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.AbstractDOMParser - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/AbstractDOMParser.class
17:14:00.812 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.AbstractXMLDocumentParser - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/AbstractXMLDocumentParser.class
17:14:00.813 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XMLDocumentHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XMLDocumentHandler.class
17:14:00.814 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XMLDTDHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XMLDTDHandler.class
17:14:00.815 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XMLDTDContentModelHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XMLDTDContentModelHandler.class
17:14:00.816 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.XMLParser - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/XMLParser.class
17:14:00.817 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.DOMParser - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/DOMParser.class
17:14:00.817 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.ErrorHandler - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/ErrorHandler.class
17:14:00.818 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDTDSource - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDTDSource.class
17:14:00.819 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLComponentManager - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLComponentManager.class
17:14:00.820 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.xs.XMLSchemaValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/xs/XMLSchemaValidator.class
17:14:00.823 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLComponent - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLComponent.class
17:14:00.824 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDocumentFilter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDocumentFilter.class
17:14:00.826 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDocumentSource - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDocumentSource.class
17:14:00.829 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.xs.identity.FieldActivator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/xs/identity/FieldActivator.class
17:14:00.830 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.RevalidationHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/RevalidationHandler.class
17:14:00.832 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.jaxp.JAXPValidatorComponent - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/jaxp/JAXPValidatorComponent.class
17:14:00.833 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.jaxp.TeeXMLDocumentFilterImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/jaxp/TeeXMLDocumentFilterImpl.class
17:14:00.835 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLConfigurationException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLConfigurationException.class
17:14:00.835 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XNIException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XNIException.class
17:14:00.836 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Document - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Document.class
17:14:00.836 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Vector - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Vector.class
17:14:00.836 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Stack - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Stack.class
17:14:00.837 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.AbstractDOMParser$1 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/AbstractDOMParser$1.class
17:14:00.837 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.CharacterData - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/CharacterData.class
17:14:00.837 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Node - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Node.class
17:14:00.839 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.CoreDocumentImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/CoreDocumentImpl.class
17:14:00.841 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.ParentNode - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/ParentNode.class
17:14:00.843 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.ChildNode - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/ChildNode.class
17:14:00.844 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.NodeImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/NodeImpl.class
17:14:00.845 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.NodeList - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/NodeList.class
17:14:00.845 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.events.EventTarget - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/events/EventTarget.class
17:14:00.845 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Cloneable
17:14:00.848 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredDocumentImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredDocumentImpl.class
17:14:00.850 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredNode - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredNode.class
17:14:00.851 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DocumentImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DocumentImpl.class
17:14:00.853 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.traversal.DocumentTraversal - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/traversal/DocumentTraversal.class
17:14:00.853 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.events.DocumentEvent - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/events/DocumentEvent.class
17:14:00.853 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.ranges.DocumentRange - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/ranges/DocumentRange.class
17:14:00.853 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.DOMError - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/DOMError.class
17:14:00.854 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.EntityImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/EntityImpl.class
17:14:00.855 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Entity - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Entity.class
17:14:00.856 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xs.ItemPSVI - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xs/ItemPSVI.class
17:14:00.857 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xs.XSSimpleTypeDefinition - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xs/XSSimpleTypeDefinition.class
17:14:00.859 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xs.XSTypeDefinition - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xs/XSTypeDefinition.class
17:14:00.859 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xs.XSObject - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xs/XSObject.class
17:14:00.862 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.EntityReferenceImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/EntityReferenceImpl.class
17:14:00.863 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.EntityReference - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/EntityReference.class
17:14:00.864 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLParseException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLParseException.class
17:14:00.865 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.Locator - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/Locator.class
17:14:00.867 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.SAXParseException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/SAXParseException.class
17:14:00.868 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.EntityResolver - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/EntityResolver.class
17:14:00.868 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLEntityResolver - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLEntityResolver.class
17:14:00.870 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLErrorHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLErrorHandler.class
17:14:00.873 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.ObjectFactory - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/ObjectFactory.class
17:14:00.874 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.FileInputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/FileInputStream.class
17:14:00.874 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Properties - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Properties.class
17:14:00.875 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.ObjectFactory$ConfigurationError - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/ObjectFactory$ConfigurationError.class
17:14:00.879 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Error
17:14:00.880 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.SecuritySupport - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/SecuritySupport.class
17:14:00.881 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.security.PrivilegedActionException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/security/PrivilegedActionException.class
17:14:00.881 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.security.PrivilegedExceptionAction - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/security/PrivilegedExceptionAction.class
17:14:00.882 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.FileNotFoundException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/FileNotFoundException.class
17:14:00.884 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.SecuritySupport$1 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/SecuritySupport$1.class
17:14:00.885 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.SecuritySupport$2 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/SecuritySupport$2.class
17:14:00.886 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.SecuritySupport$3 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/SecuritySupport$3.class
17:14:00.887 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.SecuritySupport$4 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/SecuritySupport$4.class
17:14:00.889 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.StringBuffer
17:14:00.890 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.SecuritySupport$7 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/SecuritySupport$7.class
17:14:00.892 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.SecuritySupport$6 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/SecuritySupport$6.class
17:14:00.895 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.XIncludeAwareParserConfiguration - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/XIncludeAwareParserConfiguration.class
17:14:00.899 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.XML11Configuration - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/XML11Configuration.class
17:14:00.900 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLPullParserConfiguration - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLPullParserConfiguration.class
17:14:00.904 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLParserConfiguration - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLParserConfiguration.class
17:14:00.908 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.parsers.XML11Configurable - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/parsers/XML11Configurable.class
17:14:00.908 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.ParserConfigurationSettings - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/ParserConfigurationSettings.class
17:14:00.911 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XMLLocator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XMLLocator.class
17:14:00.912 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDTDScanner - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDTDScanner.class
17:14:00.912 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDTDContentModelSource - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDTDContentModelSource.class
17:14:00.916 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLDTDValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLDTDValidator.class
17:14:00.917 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLDTDValidatorFilter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLDTDValidatorFilter.class
17:14:00.919 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLNSDTDValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLNSDTDValidator.class
17:14:00.925 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.MessageFormatter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/MessageFormatter.class
17:14:00.927 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDTDScannerImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDTDScannerImpl.class
17:14:00.933 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityHandler.class
17:14:00.935 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLScanner - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLScanner.class
17:14:00.949 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XML11DTDScannerImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XML11DTDScannerImpl.class
17:14:00.951 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLDTDProcessor - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLDTDProcessor.class
17:14:00.953 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDTDFilter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDTDFilter.class
17:14:00.956 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDTDContentModelFilter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDTDContentModelFilter.class
17:14:00.964 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XML11DTDProcessor - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XML11DTDProcessor.class
17:14:00.965 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLDTDLoader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLDTDLoader.class
17:14:00.966 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.grammars.XMLGrammarLoader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/grammars/XMLGrammarLoader.class
17:14:00.970 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLDocumentScanner - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLDocumentScanner.class
17:14:00.971 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentFragmentScannerImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentFragmentScannerImpl.class
17:14:00.977 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XML11NSDocumentScannerImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XML11NSDocumentScannerImpl.class
17:14:00.979 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XML11DocumentScannerImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XML11DocumentScannerImpl.class
17:14:00.983 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentScannerImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentScannerImpl.class
17:14:00.985 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XML11NSDTDValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XML11NSDTDValidator.class
17:14:00.986 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XML11DTDValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XML11DTDValidator.class
17:14:00.988 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLNSDocumentScannerImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLNSDocumentScannerImpl.class
17:14:00.991 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.NamespaceContext - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/NamespaceContext.class
17:14:00.992 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.SymbolTable - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/SymbolTable.class
17:14:00.993 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.grammars.XMLGrammarPool - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/grammars/XMLGrammarPool.class
17:14:00.995 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.SymbolTable$Entry - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/SymbolTable$Entry.class
17:14:00.996 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager.class
17:14:00.999 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.Augmentations - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/Augmentations.class
17:14:01.001 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityScanner - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityScanner.class
17:14:01.003 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XML11EntityScanner - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XML11EntityScanner.class
17:14:01.004 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.URI$MalformedURIException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/URI$MalformedURIException.class
17:14:01.005 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$RewindableInputStream - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$RewindableInputStream.class
17:14:01.007 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XMLResourceIdentifier - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XMLResourceIdentifier.class
17:14:01.007 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.URLConnection
17:14:01.007 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.HttpURLConnection
17:14:01.008 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$Entity - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$Entity.class
17:14:01.009 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$ScannedEntity - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$ScannedEntity.class
17:14:01.010 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.StringReader - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/StringReader.class
17:14:01.011 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$ExternalEntity - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$ExternalEntity.class
17:14:01.012 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$InternalEntity - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$InternalEntity.class
17:14:01.013 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.io.UTF8Reader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/io/UTF8Reader.class
17:14:01.018 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.io.UCSReader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/io/UCSReader.class
17:14:01.019 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.io.Latin1Reader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/io/Latin1Reader.class
17:14:01.021 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.io.ASCIIReader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/io/ASCIIReader.class
17:14:01.025 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$1 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$1.class
17:14:01.026 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.XMLResourceIdentifierImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/XMLResourceIdentifierImpl.class
17:14:01.027 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.AugmentationsImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/AugmentationsImpl.class
17:14:01.029 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.AugmentationsImpl$AugmentationsItemsContainer - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/AugmentationsImpl$AugmentationsItemsContainer.class
17:14:01.030 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.AugmentationsImpl$SmallContainer - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/AugmentationsImpl$SmallContainer.class
17:14:01.031 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.AugmentationsImpl$LargeContainer - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/AugmentationsImpl$LargeContainer.class
17:14:01.033 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$ByteBufferPool - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$ByteBufferPool.class
17:14:01.034 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$CharacterBufferPool - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$CharacterBufferPool.class
17:14:01.039 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityManager$CharacterBuffer - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityManager$CharacterBuffer.class
17:14:01.040 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.EOFException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/EOFException.class
17:14:01.040 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLEntityScanner$1 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLEntityScanner$1.class
17:14:01.044 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLErrorReporter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLErrorReporter.class
17:14:01.047 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XMLString - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XMLString.class
17:14:01.048 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.XMLStringBuffer - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/XMLStringBuffer.class
17:14:01.053 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.XMLAttributes - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/XMLAttributes.class
17:14:01.058 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentFragmentScannerImpl$Dispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentFragmentScannerImpl$Dispatcher.class
17:14:01.061 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLDTDDescription - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLDTDDescription.class
17:14:01.062 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.grammars.XMLDTDDescription - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/grammars/XMLDTDDescription.class
17:14:01.063 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.grammars.XMLGrammarDescription - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/grammars/XMLGrammarDescription.class
17:14:01.067 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentFragmentScannerImpl$ElementStack - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentFragmentScannerImpl$ElementStack.class
17:14:01.068 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.QName - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/QName.class
17:14:01.071 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLNSDocumentScannerImpl$NSContentDispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLNSDocumentScannerImpl$NSContentDispatcher.class
17:14:01.073 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentScannerImpl$ContentDispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentScannerImpl$ContentDispatcher.class
17:14:01.074 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentFragmentScannerImpl$FragmentContentDispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentFragmentScannerImpl$FragmentContentDispatcher.class
17:14:01.075 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.CharConversionException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/CharConversionException.class
17:14:01.075 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.io.MalformedByteSequenceException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/io/MalformedByteSequenceException.class
17:14:01.078 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.XMLAttributesImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/XMLAttributesImpl.class
17:14:01.080 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.XMLAttributesImpl$Attribute - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/XMLAttributesImpl$Attribute.class
17:14:01.081 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.NamespaceSupport - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/NamespaceSupport.class
17:14:01.084 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentScannerImpl$XMLDeclDispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentScannerImpl$XMLDeclDispatcher.class
17:14:01.088 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentScannerImpl$PrologDispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentScannerImpl$PrologDispatcher.class
17:14:01.091 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentScannerImpl$DTDDispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentScannerImpl$DTDDispatcher.class
17:14:01.101 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLDocumentScannerImpl$TrailingMiscDispatcher - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLDocumentScannerImpl$TrailingMiscDispatcher.class
17:14:01.113 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLEntityDecl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLEntityDecl.class
17:14:01.115 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.InvalidDatatypeValueException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/InvalidDatatypeValueException.class
17:14:01.116 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.DatatypeException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/DatatypeException.class
17:14:01.117 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.ValidationContext - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/ValidationContext.class
17:14:01.120 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.validation.EntityState - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/validation/EntityState.class
17:14:01.123 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.DTDGrammar - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/DTDGrammar.class
17:14:01.128 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.grammars.Grammar - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/grammars/Grammar.class
17:14:01.129 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.BalancedDTDGrammar - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/BalancedDTDGrammar.class
17:14:01.131 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.validation.ValidationState - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/validation/ValidationState.class
17:14:01.134 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLElementDecl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLElementDecl.class
17:14:01.143 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLSimpleType - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLSimpleType.class
17:14:01.144 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.XMLAttributeDecl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/XMLAttributeDecl.class
17:14:01.147 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dtd.DTDGrammarBucket - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dtd/DTDGrammarBucket.class
17:14:01.148 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.DTDDVFactory - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/DTDDVFactory.class
17:14:01.149 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.DVFactoryException - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/DVFactoryException.class
17:14:01.150 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.ObjectFactory - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/ObjectFactory.class
17:14:01.206 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.ObjectFactory$ConfigurationError - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/ObjectFactory$ConfigurationError.class
17:14:01.208 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.SecuritySupport - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/SecuritySupport.class
17:14:01.209 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.SecuritySupport$1 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/SecuritySupport$1.class
17:14:01.210 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.SecuritySupport$2 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/SecuritySupport$2.class
17:14:01.212 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.SecuritySupport$3 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/SecuritySupport$3.class
17:14:01.213 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.DTDDVFactoryImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/DTDDVFactoryImpl.class
17:14:01.214 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.DatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/DatatypeValidator.class
17:14:01.216 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.StringDatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/StringDatatypeValidator.class
17:14:01.217 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.IDDatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/IDDatatypeValidator.class
17:14:01.218 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.IDREFDatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/IDREFDatatypeValidator.class
17:14:01.219 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.ListDatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/ListDatatypeValidator.class
17:14:01.220 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.ENTITYDatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/ENTITYDatatypeValidator.class
17:14:01.221 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.NOTATIONDatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/NOTATIONDatatypeValidator.class
17:14:01.222 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.dv.dtd.NMTOKENDatatypeValidator - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/dv/dtd/NMTOKENDatatypeValidator.class
17:14:01.223 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.validation.ValidationManager - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/validation/ValidationManager.class
17:14:01.224 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.XMLVersionDetector - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/XMLVersionDetector.class
17:14:01.226 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.msg.XMLMessageFormatter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/msg/XMLMessageFormatter.class
17:14:01.227 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.MissingResourceException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/MissingResourceException.class
17:14:01.227 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Locale - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Locale.class
17:14:01.229 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xni.parser.XMLInputSource - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xni/parser/XMLInputSource.class
17:14:01.230 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.xml.sax.InputSource - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/xml/sax/InputSource.class
17:14:01.230 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.URI - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/URI.class
17:14:01.234 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.XMLChar - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/XMLChar.class
17:14:01.236 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Arrays - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Arrays.class
17:14:01.237 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.XIncludeHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/XIncludeHandler.class
17:14:01.239 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.SecurityManager - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/SecurityManager.class
17:14:01.240 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.HTTPInputSource - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/HTTPInputSource.class
17:14:01.242 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.MultipleScopeNamespaceSupport - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/MultipleScopeNamespaceSupport.class
17:14:01.242 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.XIncludeNamespaceSupport - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/XIncludeNamespaceSupport.class
17:14:01.243 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.XIncludeHandler$Notation - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/XIncludeHandler$Notation.class
17:14:01.244 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.XIncludeHandler$UnparsedEntity - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/XIncludeHandler$UnparsedEntity.class
17:14:01.246 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xpointer.XPointerProcessor - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xpointer/XPointerProcessor.class
17:14:01.247 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xpointer.XPointerHandler - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xpointer/XPointerHandler.class
17:14:01.248 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.XIncludeTextReader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/XIncludeTextReader.class
17:14:01.249 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.XInclude11TextReader - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/XInclude11TextReader.class
17:14:01.252 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.XMLSymbols - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/XMLSymbols.class
17:14:01.253 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.XMLLocatorWrapper - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/XMLLocatorWrapper.class
17:14:01.254 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.xinclude.XIncludeMessageFormatter - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/xinclude/XIncludeMessageFormatter.class
17:14:01.255 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.util.IntStack - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/util/IntStack.class
17:14:01.256 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.Constants - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/Constants.class
17:14:01.258 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.impl.Constants$ArrayEnumeration - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/impl/Constants$ArrayEnumeration.class
17:14:01.261 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.NoSuchElementException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/NoSuchElementException.class
17:14:01.264 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.ElementImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/ElementImpl.class
17:14:01.264 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Element - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Element.class
17:14:01.264 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.TypeInfo - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/TypeInfo.class
17:14:01.266 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.DOMException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/DOMException.class
17:14:01.266 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.CloneNotSupportedException
17:14:01.269 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DocumentTypeImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DocumentTypeImpl.class
17:14:01.270 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.DocumentType - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/DocumentType.class
17:14:01.270 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Attr - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Attr.class
17:14:01.270 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Notation - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Notation.class
17:14:01.271 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.CDATASection - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/CDATASection.class
17:14:01.271 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.ProcessingInstruction - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/ProcessingInstruction.class
17:14:01.271 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Comment - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Comment.class
17:14:01.272 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.Text - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/Text.class
17:14:01.273 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.ElementNSImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/ElementNSImpl.class
17:14:01.276 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.AttrImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/AttrImpl.class
17:14:01.278 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.AttrNSImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/AttrNSImpl.class
17:14:01.280 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.DOMConfiguration - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/DOMConfiguration.class
17:14:01.281 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.DocumentFragment - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/DocumentFragment.class
17:14:01.282 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.events.Event - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/events/Event.class
17:14:01.282 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.events.MutationEvent - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/events/MutationEvent.class
17:14:01.283 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.ranges.Range - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/ranges/Range.class
17:14:01.283 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.traversal.TreeWalker - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/traversal/TreeWalker.class
17:14:01.283 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.traversal.NodeIterator - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/traversal/NodeIterator.class
17:14:01.284 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.events.EventException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/events/EventException.class
17:14:01.286 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredAttrNSImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredAttrNSImpl.class
17:14:01.287 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredAttrImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredAttrImpl.class
17:14:01.289 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredCDATASectionImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredCDATASectionImpl.class
17:14:01.290 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.CDATASectionImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/CDATASectionImpl.class
17:14:01.291 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.TextImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/TextImpl.class
17:14:01.293 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.CharacterDataImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/CharacterDataImpl.class
17:14:01.297 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredCommentImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredCommentImpl.class
17:14:01.298 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.CommentImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/CommentImpl.class
17:14:01.299 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredDocumentTypeImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredDocumentTypeImpl.class
17:14:01.300 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredElementNSImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredElementNSImpl.class
17:14:01.301 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredElementImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredElementImpl.class
17:14:01.302 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredEntityImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredEntityImpl.class
17:14:01.303 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredEntityReferenceImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredEntityReferenceImpl.class
17:14:01.303 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredNotationImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredNotationImpl.class
17:14:01.304 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.NotationImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/NotationImpl.class
17:14:01.305 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredProcessingInstructionImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredProcessingInstructionImpl.class
17:14:01.306 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.ProcessingInstructionImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/ProcessingInstructionImpl.class
17:14:01.307 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredTextImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredTextImpl.class
17:14:01.307 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredElementDefinitionImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredElementDefinitionImpl.class
17:14:01.308 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.ElementDefinitionImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/ElementDefinitionImpl.class
17:14:01.318 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.DeferredDocumentImpl$RefCount - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/DeferredDocumentImpl$RefCount.class
17:14:01.384 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.w3c.dom.NamedNodeMap - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/org/w3c/dom/NamedNodeMap.class
17:14:01.384 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.NamedNodeMapImpl - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/NamedNodeMapImpl.class
17:14:01.385 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.AttributeMap - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/AttributeMap.class
17:14:01.389 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.StringIndexOutOfBoundsException
17:14:01.390 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.CharacterDataImpl$1 - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/CharacterDataImpl$1.class
17:14:01.394 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.xerces.dom.NodeListCache - jar:file:/home/aims/spark/jars/xercesImpl-2.9.1.jar!/org/apache/xerces/dom/NodeListCache.class
17:14:01.519 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Iterator - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Iterator.class
17:14:01.519 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Map$Entry - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Map$Entry.class
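[Annotation] The block above is Spark SQL's IsolatedClientLoader tracing every class it resolves while bootstrapping the Hive metastore client: names matching its shared-class predicate are delegated to the parent loader and logged as "shared class: ...", while everything else is logged as "hive class: ..." together with the location the bytes were actually read from (the Xerces classes come from xercesImpl-2.9.1.jar; several JRE types above resolve from rt.jar). As a rough illustration of that delegation pattern, here is a minimal Java sketch; the real IsolatedClientLoader is Scala code inside Spark, and the isShared() predicate below is purely hypothetical:

    import java.net.URL;
    import java.net.URLClassLoader;

    // Minimal sketch of the shared-vs-isolated delegation seen in the
    // trace above. Not Spark's actual implementation; isShared() is a
    // made-up predicate, and a production loader would also need a
    // fallback path when findClass() misses.
    public class IsolatingClassLoader extends URLClassLoader {

        public IsolatingClassLoader(URL[] jars, ClassLoader parent) {
            super(jars, parent);
        }

        // Hypothetical predicate: JDK and logging classes stay shared.
        private boolean isShared(String name) {
            return name.startsWith("java.")
                || name.startsWith("javax.")
                || name.startsWith("org.slf4j.");
        }

        @Override
        protected Class<?> loadClass(String name, boolean resolve)
                throws ClassNotFoundException {
            if (isShared(name)) {
                // Delegate to the parent loader -> "shared class: <name>"
                return super.loadClass(name, resolve);
            }
            synchronized (getClassLoadingLock(name)) {
                Class<?> c = findLoadedClass(name);
                if (c == null) {
                    // Resolve from this loader's own jar URLs
                    // -> "hive class: <name> - jar:file:...!..."
                    c = findClass(name);
                }
                if (resolve) {
                    resolveClass(c);
                }
                return c;
            }
        }
    }

The child-first branch is what keeps the Hive client's bundled dependencies (such as Xerces 2.9.1 here) from colliding with whatever versions the application classpath carries, while truly common types still come from one place via the shared branch.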
17:14:01.525 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for all properties in config...
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.reducers.bytes.per.reducer
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.createtable.group.grants
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.storeManagerType
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.aux.jars.path
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.stagingdir
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.rcfile.block.level
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.input.fileinputformat.split.minsize.per.rack
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.default.partition.name
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.event.expiry.duration
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.default.compress
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.mode.local.auto.input.files.max
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.key.prefix
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hadoop.supports.splittable.combineinputformat
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.mapjoin.native.fast.hashtable.enabled
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.skewjoin.compiletime
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.smbjoin.cache.rows
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.mapjoin.overflow.repeated.threshold
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.log.level
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.mapfiles
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.post.hooks
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.client.socket.lifetime
17:14:01.526 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for fs.har.impl
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.max.variance
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.input.fileinputformat.split.minsize
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.zookeeper.quorum
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for stream.stderr.reporter.prefix
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.reduce.speculative
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.hybridgrace.memcheckfrequency
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.index.filter.compact.maxsize
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.counters.pull.interval
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.command.whitelist
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.end.function.listeners
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.downloaded.resources.dir
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.join.emit.interval
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.zerocopy
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compute.query.using.stats
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.block.padding.tolerance
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.lazysimple.extended_boolean_literal
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.error.on.empty.partition
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.orc.splits.include.file.footer
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.prewarm.enabled
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hadoop.bin.path
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.io.rcfile.record.buffer.size
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.default.rcfile.serde
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.multi.insert.move.tasks.share.dependencies
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.createtable.owner.grants
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.users.in.admin.role
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.autogen.columnalias.prefix.includefuncname
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.max.partition.factor
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.port
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.orc.cache.stripe.details.size
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.max.created.files
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.job.committer.task.cleanup.needed
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cli.prompt
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.input.fileinputformat.input.dir.recursive
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.deserialization.factor
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metadata.export.location
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.log.explain.output
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.skewjoin
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.default.fileformat
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.optimized.hashtable.wbsize
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.metastore.authorization.auth.reads
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.option.NonTransactionalRead
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.remove.identity.project
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.infer.bucket.sort.num.buckets.power.two
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.worker.threads
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exim.strict.repl.tables
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.collect.tablekeys
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.client.future.timeout
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.display.partition.cols.separately
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.async.exec.shutdown.timeout
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.disallow.incompatible.col.type.changes
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.max.idle.time
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.test.dummystats.aggregator
17:14:01.527 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.querylog.enable.plan.progress
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.test.mode
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.cookie.auth.enabled
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.worker.keepalive.time
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.archive.intermediate.archived
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.warehouse.dir
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hwi.listen.host
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.collect.scancols
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hwi.war.file
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.input.format
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.test.dummystats.publisher
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.cookie.is.httponly
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.uris
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.querylog.location
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.localize.resource.num.wait.attempts
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.default.stripe.size
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.querylog.plan.progress.interval
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.limit.optimize.enable
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.job.debug.timeout
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.createtable.role.grants
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.decode.partition.name
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.partition.inherit.table.properties
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cluster.delegation.token.store.class
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.autoStartMechanismMode
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.zookeeper.client.port
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.cookie.max.age
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.default.row.index.stride
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.alias
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.exponential.backoff.slot.length
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.mapjoin.native.enabled
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.tez.default.queues
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compat
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapred.partitioner
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.smallfiles.avgsize
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hbase.wal.enabled
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.entity.capture.transform
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.index.blockfilter.file
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.logging.operation.enabled
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.lockmgr.zookeeper.default.partition.name
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.support.concurrency
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.file.max.footer
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.test.mode.prefix
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cli.print.header
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.table.type.mapping
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.event.db.listener.timetolive
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.tasklog.debug.timeout
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hashtable.loadfactor
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.filter.hook
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapred.local.mem
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.union.remove
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.global.init.file.location
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.client.drop.partitions.using.expressions
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.outerjoin.supports.filters
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.script.auto.progress
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.dynamic.partition
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.intermediate.compression.type
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.try.direct.sql
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.failure.retries
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.analyze.stmt.collect.partlevel.stats
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hbase.generatehfiles
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.join.factor
17:14:01.528 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.pre.event.listeners
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.map.fair.scheduler.queue
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.reducededuplication
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.localize.resource.wait.interval
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.index.filter.compact.minsize
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.copyfile.maxsize
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.enabled
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.sasl.enabled
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.manager
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.compression.strategy
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.rpc.query.plan
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.mapredfiles
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cache.expr.evaluation
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.counters.group.name
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.transactionIsolation
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.in.test
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.groupby.skewindata
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.hybridgrace.hashtable
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.clean.until
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.reliable
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.batch.retrieve.max
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.entity.separator
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.binary.record.max.length
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.max.dynamic.partitions
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.map.groupby.sorted
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hashtable.initialCapacity
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.check.memory.rows
17:14:01.529 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.idle.operation.timeout
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.default.block.size
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.costmodel.hdfs.read
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.client.server.connect.timeout
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.transport.mode
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.path
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.index.compact.query.max.entries
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.execute.setugi
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.input.fileinputformat.split.maxsize
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.bucket.cache.size
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.drop.ignorenonexistent
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.serdes.using.metastore.for.schema
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.test.mode.nosamplelist
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.sparkfiles
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exim.uri.scheme.whitelist
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.query.redactor.hooks
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.log4j.file
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.fixedDatastore
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.sasl.qop
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.job.committer.setup.cleanup.needed
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.delta.num.threshold
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.plan
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.script.serde
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.log4j.file
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.ddl.createtablelike.properties.whitelist
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.input.fileinputformat.split.minsize.per.node
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.bucketmapjoin
17:14:01.530 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.map.aggr.hash.percentmemory
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.server.max.message.size
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.job.debug.capture.stacktraces
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cluster.delegation.token.store.zookeeper.acl
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.map.groupby.sorted.testmode
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.mapjoin.minmax.enabled
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.sample.seednumber
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.event.clean.freq
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.session.hook
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapred.reduce.tasks.speculative.execution
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.convert.sortmerge.join.bigtable.selection.policy
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stageid.rearrange
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.groupby.flush.percent
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.temporary.table.storage
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.groupby.maxentries
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.optimized.hashtable
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.limit.optimize.fetch.max
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authenticator.manager
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.client.stats.publishers
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.validateColumns
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.parallel
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.io.rcfile.record.interval
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.submitviachild
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.fetch.task.conversion
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.udtf.auto.progress
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.archive.enabled
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.builtin.udf.whitelist
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.client.rpc.max.size
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.authentication.spnego.principal
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.test.authz.sstd.hs2.mode
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.async.exec.threads
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.convert.join.bucket.mapjoin.tez
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.execution.engine
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.container.size
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.option.ConnectionPassword
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.use.SSL
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.null.scan
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.size
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.smalltable.filesize
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.session.silent
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.query.string
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.min.worker.threads
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.enforce.sorting
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.convert.join.use.nonstaged
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.tez.sessions.per.default.queue
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.idle.session.check.operation
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.port
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.key.prefix.max.length
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.logging.operation.log.location
17:14:01.531 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.option.ConnectionURL
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.hybridgrace.minnumpartitions
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hmshandler.force.reload.conf
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.server.tcp.keepalive
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.semantic.analyzer.hook
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.client.rpc.threads
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.map.aggr.hash.min.reduction
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.io.rcfile.column.number.conf
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.costmodel.cpu
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.zookeeper.clean.extra.nodes
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.metadataonly
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.insert.into.multilevel.dirs
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.added.archives.path
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hmshandler.retry.attempts
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.validateConstraints
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.retries.max
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.memory.pool
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.prewarm.numcontainers
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.identifierFactory
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cli.errors.ignore
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.multigroupby.singlereducer
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.conf.restricted.list
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.sampling.orderby.number
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.fetch.task.aggr
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.convert.sortmerge.join.to.mapjoin
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.warehouse.subdir.inherit.perms
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.txn.timeout
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.fetch.partition.stats
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.progress.timeout
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.returnpath.hiveop
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.dictionary.key.size.threshold
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.scratchdir
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.limit.optimize.limit.file
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.server.max.threads
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.try.direct.sql.ddl
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.script.allow.partial.consumption
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.hybridgrace.minwbsize
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.zookeeper.namespace
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.long.polling.timeout
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.debug.localtask
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.createtable.user.grants
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server.tcp.keepalive
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.ppd
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.script.maxerrsize
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.worker.keepalive.time
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.enforce.bucketmapjoin
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.client.connect.timeout
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.session.id
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.allow.user.substitution
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.convert.join.noconditionaltask
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.input.format
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.index.autoupdate
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.ssl.protocol.blacklist
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.dynamic.partition.pruning
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.fetch.column.stats
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.max.dynamic.partitions.pernode
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.cleaner.run.interval
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.skewjoin.mapjoin.map.tasks
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.schema.verification.record.version
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for mapreduce.job.reduces
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.abortedtxn.threshold
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.map.aggr
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.support.quoted.identifiers
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.PersistenceManagerFactoryClass
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.initiator.on
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.orc.row.index.stride.dictionary.check
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.fs.handler.class
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.task.factory
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.lock.numretries
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.typecheck.on.insert
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.convert.join
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.support.dynamic.service.discovery
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.distinct.rewrite
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.authorization.storage.checks
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.skip.corrupt.data
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.cache.level2
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.builtin.udf.blacklist
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.kerberos.principal
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.rdbms.useLegacyNativeValueStrategy
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.collect.rawdatasize
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.ppd.storage
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.index.compact.binary.search
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.costmodel.local.fs.read
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.max.full
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.enabled
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.correlation
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.http.cookie.is.secure
17:14:01.532 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.orcfile.stripe.level
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.reorder.nway.joins
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.compress.output
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.user.install.directory
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.list.num.entries
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.sqlstd.confwhitelist.append
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.insert.into.external.tables
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.groupby.checkinterval
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.explain.dependency.append.tasktype
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.bucketingsorting
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.login.timeout
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cli.print.current.db
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.scratch.dir.permission
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hashtable.key.count.adjustment
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.failure.hooks
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.integral.jdo.pushdown
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.io.exception.handlers
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.jobname.length
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.bind.host
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.added.jars.path
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.tez.initialize.default.sessions
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.client.socket.timeout
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.option.DetachAllOnCommit
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.txn.max.open.batch
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.check.interval
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.query.id
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.key.prefix.reserve.length
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.option.ConnectionDriverName
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.reduce.enabled
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.delta.pct.threshold
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.current.database
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.orm.retrieveMapNullsAsEmptyStrings
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.max.variable.length
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.start.cleanup.scratchdir
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.rcfile.use.explicit.header
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.tezfiles
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.split.strategy
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.async.exec.keepalive.time
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.index.filter
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.authorization.sqlstd.confwhitelist
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.default.serde
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.listbucketing
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.zookeeper.connection.basesleeptime
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.ds.connection.url.hook
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.query.result.fileformat
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.partition.name.whitelist.pattern
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.map.num.entries
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.dynamic.partition.pruning.max.event.size
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.enable
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.constant.propagation
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.mode.local.auto
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.reducededuplication.min.reducer
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.transform.escape.input
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.max.start.attempts
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.max.worker.threads
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.dynamic.partition.mode
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.costmodel.network
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.fpp
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.driver.run.hooks
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.pre.hooks
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.conf.validation
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.added.files.path
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.session.history.enabled
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.script.operator.id.env.var
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.unlock.numretries
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.option.Multithreaded
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.rework.mapredwork
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.groupby
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.client.connect.retry.delay
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.check.crossproducts
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server.read.socket.timeout
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.retries.wait
17:14:01.533 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.reducers.max
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.fetch.task.conversion.threshold
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.limit.row.max.size
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.perf.logger
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.thrift.compact.protocol.enabled
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.plugin.pluginRegistryBundleCheck
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.convert.join.noconditionaltask.size
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.script.operator.truncate.env
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.join.cache.size
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.parallel.thread.number
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.validateTables
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.skewjoin.key
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.max.reader.wait
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.metastore.authenticator.manager
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.default.fileformat.managed
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.enforce.bucketing
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.reloadable.aux.jars.path
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.index.compact.file.ignore.hdfs
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hmshandler.retry.interval
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.local.scratchdir
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.thrift.max.message.size
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapred.mode
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.default.buffer.size
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.gather.num.threads
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.limit.pushdown.memory.usage
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.archive.intermediate.original
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.mode.local.auto.inputbytes.max
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.localtask.max.memory.usage
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.support.sql11.reserved.keywords
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.batch.retrieve.table.partition.max
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.dynamic.partition.pruning.max.data.size
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metadata.move.exported.metadata.to.trash
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cli.pretty.output.num.cols
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.zookeeper.session.timeout
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.fetch.output.serde
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hbase.snapshot.restoredir
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.skewjoin.mapjoin.min.split
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.resultset.use.unique.column.names
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.zookeeper.connection.max.retries
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.max.partitions
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compactor.worker.timeout
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.session.check.interval
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.compute.splits.in.am
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for parquet.memory.pool.ratio
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapred.supports.subdirectories
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.authentication.kerberos.principal
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.new.job.grouping.set.cardinality
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.client.stats.counters
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.enforce.sortmergebucketmapjoin
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.smb.number.waves
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.max.writer.wait
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.ppd.recognizetransivity
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.authentication.spnego.keytab
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.io.rcfile.tolerate.corruptions
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.client.secret.bits
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for stream.stderr.reporter.enabled
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.autogen.columnalias.prefix.label
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.event.listeners
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.repl.task.factory
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.int.timestamp.conversion.in.seconds
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.auto.reducer.parallelism
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.rawstore.impl
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.security.metastore.authorization.manager
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.autoCreateSchema
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.jar.path
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.mapjoin.native.multikey.only.enabled
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.orc.compute.splits.num.threads
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.limit.query.max.table.partition
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.rowoffset
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.default.publisher
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.script.recordwriter
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.ppd.remove.duplicatefilters
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.keystore.password
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.logging.operation.level
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.variable.substitute
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.txn.manager
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.cache.level2.type
17:14:01.534 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.stats.ndv.densityfunction
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.direct.sql.batch.size
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.jdbc.timeout
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.intermediate.compression.codec
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.server.min.threads
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.exec.print.summary
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.compress.intermediate
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.enabled
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.expression.proxy
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.script.recordreader
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.autogather
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.sort.dynamic.partition
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.init.hooks
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.dml.events
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.plan.serialization.format
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.thrift.framed.transport.enabled
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.log.every.n.records
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.heartbeat.interval
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.index.compact.query.max.size
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.vectorized.execution.reduce.groupby.enabled
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.lock.sleep.between.retries
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.test.mode.samplefreq
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.dbclass
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.jdbcdriver
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.concatenate.check.index
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.authentication
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for datanucleus.connectionPoolingType
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.rcfile.use.sync.cache
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.map.aggr.hash.force.flush.memory.threshold
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.cache.pinobjtypes
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.fileformat.check
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.async.exec.wait.queue.size
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.default.aggregator
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.explain.user
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.encoding.strategy
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.keystore.path
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.schema.verification
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.connect.retries
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cluster.delegation.token.store.zookeeper.connectString
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.infer.bucket.sort
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.aggregate.stats.cache.ttl
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.submit.local.task.via.child
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.index.compact.file
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.hwi.listen.port
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.archive.intermediate.extracted
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cluster.delegation.token.store.zookeeper.znode
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.dbconnectionstring
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.costmodel.hdfs.write
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.authentication.kerberos.keytab
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.cpu.vcores
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.followby.map.aggr.hash.percentmemory
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.parquet.timestamp.skip.conversion
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.exec.inplace.progress
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.lock.manager
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapper.cannot.span.multiple.partitions
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.variable.substitute.depth
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.merge.size.per.task
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.table.parameters.default
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.ignore.mapjoin.hint
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.sampling.orderby.percent
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.lock.mapred.only.operation
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.tez.min.partition.factor
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.client.rpc.sasl.mechanisms
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.orc.default.block.padding
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.sampling.orderby
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.metastore.kerberos.keytab.file
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.groupby.mapaggr.checkinterval
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.script.trust
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.mapjoin.followby.gby.localtask.max.memory.usage
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for javax.jdo.option.ConnectionUserName
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.spark.job.monitor.timeout
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.exec.show.job.failure.debug.info
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.groupby.orderby.position.alias
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.costmodel.extended
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.script.operator.env.blacklist
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.cbo.costmodel.local.fs.write
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.in.tez.test
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.ndv.error
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.bucketmapjoin.sortedmerge
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.enable.doAs
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.zookeeper.namespace
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.stats.atomic
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.optimize.index.groupby
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.auto.convert.sortmerge.join
17:14:01.535 [main] DEBUG org.apache.hadoop.conf.Configuration - Handling deprecation for hive.server2.idle.session.timeout
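
The long run of "Handling deprecation for ..." lines above comes from Hadoop's Configuration deprecation machinery: when the freshly created HiveConf's property list is first materialized, Configuration checks every key against its deprecation table and logs each check at DEBUG, one line per property. A minimal, self-contained sketch of that mechanism follows; the key names in it are made up for illustration and do not appear in this log.

// Sketch of Hadoop's Configuration deprecation handling, which produces
// the "Handling deprecation for ..." DEBUG lines above. Key names here
// are illustrative, not taken from the log.
import org.apache.hadoop.conf.Configuration;

public class DeprecationDemo {
    public static void main(String[] args) {
        // Register a mapping from an old key to its replacement. Hadoop
        // consults this table whenever either key is read or written.
        Configuration.addDeprecation("old.example.key", "new.example.key");

        Configuration conf = new Configuration(false); // skip default resources
        conf.set("old.example.key", "42");             // goes through the deprecation check

        // Setting the old key populates the new one too, so reads through
        // either name stay consistent.
        System.out.println(conf.get("new.example.key"));          // 42
        System.out.println(Configuration.isDeprecated("old.example.key")); // true
    }
}
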
17:14:01.570 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/com/sun/org/apache/xalan/internal/xsltc/trax/TransformerFactoryImpl.class
17:14:01.696 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.LoopingByteArrayInputStream$1 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/LoopingByteArrayInputStream$1.class
17:14:01.713 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: sun.reflect.ConstructorAccessorImpl - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/sun/reflect/ConstructorAccessorImpl.class
17:14:01.758 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.ByteArrayInputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/ByteArrayInputStream.class
17:14:01.780 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.conf.HiveConfUtil - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/conf/HiveConfUtil.class
17:14:01.785 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: com.google.common.base.Joiner
17:14:01.786 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.JavaConverters$
17:14:01.789 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.convert.Decorators$AsScala
17:14:01.789 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.Iterator
17:14:01.792 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$2 - -18148984
17:14:01.793 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Serializable
17:14:01.793 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.AbstractFunction1
17:14:01.794 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$2$$anonfun$apply$2 - -874254007
17:14:01.795 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.AbstractFunction0
17:14:01.795 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.StringContext
17:14:01.795 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Predef$
17:14:01.795 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.address=local
17:14:01.795 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.BoxedUnit
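
The IsolatedClientLoader lines above record Spark's per-class isolation decisions while it builds the Hive metastore client: "shared class" means the class is delegated to Spark's own classloader (Scala, Guava, and similar types must be shared so instances can cross the loader boundary), "hive class" means it is resolved from the isolated Hive 1.2.1 jars or the JDK, and "custom defining" means Spark defines one of its own bridge classes (the HiveClientImpl closures) inside the isolated loader so they link against the Hive classes there. Below is a much-simplified sketch of the underlying delegation pattern; the prefix predicate is illustrative and is not Spark's actual shared-class list.

// Simplified sketch of the classloader-isolation pattern behind the
// "shared class" / "hive class" decisions logged above. Spark's real
// IsolatedClientLoader uses its own shared/barrier prefix rules.
import java.net.URL;
import java.net.URLClassLoader;

public class IsolatingClassLoader extends URLClassLoader {
    private final ClassLoader sharedParent;

    public IsolatingClassLoader(URL[] hiveJars, ClassLoader sharedParent) {
        super(hiveJars, null); // null parent: no automatic delegation to the app loader
        this.sharedParent = sharedParent;
    }

    // Illustrative predicate: which classes must be visible to both sides.
    private boolean isShared(String name) {
        return name.startsWith("scala.") || name.startsWith("com.google.common.");
    }

    @Override
    protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        if (isShared(name)) {
            return sharedParent.loadClass(name);       // "shared class"
        }
        synchronized (getClassLoadingLock(name)) {
            Class<?> c = findLoadedClass(name);
            if (c == null) {
                try {
                    c = findClass(name);               // "hive class" from the Hive jars
                } catch (ClassNotFoundException e) {
                    // JDK classes and anything else not in the Hive jars fall
                    // back to normal delegation (bootstrap, since parent is null).
                    c = super.loadClass(name, resolve);
                }
            }
            if (resolve) resolveClass(c);
            return c;
        }
    }
}
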
17:14:01.795 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.scheduler.monitor.policies=org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicy
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.client.thread-count=10
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.leveldb-state-store.compaction-interval-secs=3600
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapred.child.java.opts=-Xmx200m
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.containerlauncher.threadpool-initial-size=10
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.retiredjobs.cache.size=1000
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.admin.acl=*
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.job.committer.cancel-timeout=60000
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.emit-timeline-data=false
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.ftp.host.port=21
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.end-notification.retry.attempts=0
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms=300000
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/system/rmstore
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.ipc.rpc.class=org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.connection.maxidletime=10000
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.process-kill-wait.ms=2000
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.cluster.acls.enabled=false
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.handler.count=10
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.map.index.interval=128
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.seqfile.sorter.recordlimit=1000000
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3n.multipart.uploads.enabled=false
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.profile.reduces=0-2
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.ubertask.maxmaps=9
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.tasks.sleeptimebeforesigkill=5000
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.util.hash.type=murmur
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.container-manager.thread-count=20
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.jobhistory.block.size=3145728
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.app-submission.cross-platform=false
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.AbstractFileSystem.file.impl=org.apache.hadoop.fs.local.LocalFs
17:14:01.796 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: net.topology.script.number.args=100
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.container-tokens.master-key-rolling-interval-secs=86400
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.DefaultCodec
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.reducer.preempt.delay.sec=0
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3native.bytes-per-checksum=512
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.windows-container.memory-limit.enabled=false
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.input.fileinputformat.split.minsize=0
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.group.mapping=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.system.dir=${hadoop.tmp.dir}/mapred/system
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.end-notification.max.attempts=5
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.markreset.buffer.percent=0.0
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.speculative=true
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.localizer.cache.cleanup.interval-ms=600000
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.threads.core=15
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.recovery.store.fs.uri=${hadoop.tmp.dir}/mapred/history/recoverystore
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.leveldb-timeline-store.start-time-read-cache-size=10000
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.zk.retry.interval.ms=1000
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.keytab=/etc/krb5.keytab
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.admin.address=${yarn.resourcemanager.hostname}:8033
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.maps=2
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.maxtasks.perjob=-1
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.ubertask.enable=false
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.fs.state-store.retry-policy-spec=2000, 500
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.ttl-enable=true
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.delete.debug-delay-sec=0
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.skip.maxgroups=0
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.trash.interval=0
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.am.max-attempts=2
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.heartbeats.in.second=100
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.zk-num-retries=1000
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3.blocksize=67108864
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.persist.jobstatus.active=true
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.parallelcopies=5
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3.buffer.dir=${hadoop.tmp.dir}/s3
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.instrumentation.requires.admin=false
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.zk.retry.ceiling.ms=60000
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.env-whitelist=JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,HADOOP_YARN_HOME
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.container.liveness-monitor.interval-ms=600000
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.client.job.max-retries=0
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.cleaner.enable=true
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.linux-container-executor.cgroups.hierarchy=/hadoop-yarn
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.http.address=0.0.0.0:50060
17:14:01.797 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.failover-controller.graceful-fence.connection.retries=1
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.recovery.enabled=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.container.log.backups=0
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.disk-health-checker.interval-ms=120000
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.java.secure.random.algorithm=SHA1PRNG
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.cluster.temp.dir=${hadoop.tmp.dir}/mapred/temp
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.max.total.tasks=1000
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.fail-fast=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.client.submit.file.replication=10
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.port=13562
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.maxattempts=4
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.resource-tracker.client.thread-count=50
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.webapp.cross-origin.enabled=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.delete.thread-count=4
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.admin-env=MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.hostname=0.0.0.0
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.skip.checksum.errors=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.speculative.speculative-cap-total-tasks=0.01
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.proxy-user-privileges.enabled=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.acl.enable=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.fast.upload=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: file.blocksize=67108864
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ftp.replication=3
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.speculative.slowtaskthreshold=1.0
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.cleaner.initial-delay-mins=10
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3native.client-write-packet-size=65536
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: file.bytes-per-checksum=512
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.seqfile.lazydecompress=true
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.skip.start.attempts=2
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.common.configuration.version=0.23.0
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.client.thread-count=50
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.admin.address=0.0.0.0:8047
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.taskcache.levels=2
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.linux-container-executor.nonsecure-mode.user-pattern=^[_.A-Za-z0-9][-@_.A-Za-z0-9]{0,255}?[$]?$
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.zk-timeout-ms=10000
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.max-completed-applications=10000
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.cleaner.period-mins=1440
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.jvm.numtasks=1
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.tasktracker.maxblacklists=4
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.linux-container-executor.cgroups.mount=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.checksum.algo.impl=org.apache.hadoop.yarn.sharedcache.ChecksumSHA256Impl
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.end-notification.max.retry.interval=5000
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.acl-view-job=
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.classloader=false
17:14:01.798 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.log-aggregation-enable=false
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.fetch.retry.interval-ms=1000
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.nodemanager.minimum.version=NONE
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.kms.client.encrypted.key.cache.size=500
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.job.task.listener.thread-count=30
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.resource.cpu-vcores=1
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.output.fileoutputformat.compress.type=RECORD
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.group.mapping.ldap.search.attr.member=member
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.log.retain-seconds=10800
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.end-notification.retry.interval=1000
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.local-cache.max-files-per-directory=8192
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.ssl.client.conf=ssl-client.xml
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.root-dir=/sharedcache
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.fetch.retry.timeout-ms=30000
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.instrumentation=org.apache.hadoop.mapred.TaskTrackerMetricsInst
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.jobhistory.task.numberprogresssplits=12
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.memory.mb=1024
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.failover-controller.new-active.rpc-timeout.ms=60000
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.ssl.hostname.verifier=DEFAULT
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.healthchecker.interval=60000
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.taskmemorymanager.monitoringinterval=5000
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.webapp.address=${yarn.timeline-service.hostname}:8188
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.connect.retry-interval.ms=30000
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3native.blocksize=67108864
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.scheduler.minimum-allocation-mb=1024
17:14:01.799 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.cleaner.resource-sleep-ms=0
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: net.topology.impl=org.apache.hadoop.net.NetworkTopology
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.seqfile.compress.blocksize=1000000
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.zk.retry.times=5
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.AbstractFileSystem.ftp.impl=org.apache.hadoop.fs.ftp.FtpFs
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.running.reduce.limit=0
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.scheduler.maximum-allocation-mb=8192
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.reduce.shuffle.consumer.plugin.class=org.apache.hadoop.mapreduce.task.reduce.Shuffle
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.io.sort.factor=10
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.persist.jobstatus.dir=/jobtracker/jobsInfo
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.vmem-check-enabled=true
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.rpc.protection=authentication
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.permissions.umask-mode=022
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3.sleepTimeSeconds=10
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.health-monitor.rpc-timeout.ms=45000
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.staticuser.user=dr.who
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.connection.maximum=15
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.linux-container-executor.nonsecure-mode.limit-users=true
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.paging.maximum=5000
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.AbstractFileSystem.viewfs.impl=org.apache.hadoop.fs.viewfs.ViewFs
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.ftp.host=0.0.0.0
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user=nobody
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.http.threads=40
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.am-rm-tokens.master-key-rolling-interval-secs=86400
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.dns.nameserver=default
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.compression.codec.bzip2.library=system-native
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.skip.maxrecords=0
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.ping.interval=60000
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.maxtaskfailures.per.tracker=3
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.max.connections=0
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: net.topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.loadedjobs.cache.size=5
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.application-client-protocol.poll-interval-ms=200
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.localizer.address=${yarn.nodemanager.hostname}:8040
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.client.output.filter=FAILED
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.client.best-effort=false
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.zookeeper.parent-znode=/hadoop-ha
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.persist.jobstatus.hours=1
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.admin.thread-count=1
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.resource.cpu-vcores=8
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.http.policy=HTTP_ONLY
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.speculative.retry-after-no-speculate=1000
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.attempts.maximum=10
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled=true
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3native.stream-buffer-size=4096
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.log-aggregation.retain-check-interval-seconds=-1
17:14:01.800 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.seqfile.local.dir=${hadoop.tmp.dir}/io/local
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3n.multipart.copy.block.size=5368709120
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.zk-acl=world:anyone:rwcda
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.ssl.keystores.factory.class=org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.split.metainfo.maxsize=10000000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.random.device.file.path=/dev/urandom
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.nodemanager-connect.max-wait-ms=180000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3.maxRetries=4
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.io.sort.mb=100
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.zk-state-store.parent-path=/rmstore
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.client-am.ipc.max-retries=3
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.client.resolve.remote.symlinks=true
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.linux-container-executor.cgroups.strict-resource-usage=false
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.ssl.enabled.protocols=TLSv1
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.cpu.vcores=1
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.address=0.0.0.0:10020
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.failover-retries=0
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.ssl.enabled=false
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.kill.max=10
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.committer.setup.cleanup.needed=true
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.localizer.cache.target-size-mb=10240
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.admin.client.thread-count=1
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.speculative.retry-after-speculate=15000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.restart.recover=false
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.connect.max.retries=10
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.store-class=org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.tmp.dir=/tmp/hadoop-${user.name}
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.ha.automatic-failover.embedded=true
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.ttl-ms=604800000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.profile.map.params=${mapreduce.task.profile.params}
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.nodemanagers.heartbeat-interval-ms=1000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.speculative=true
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.recovery.enabled=false
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.recovery.dir=${hadoop.tmp.dir}/yarn-nm-recovery
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.counters.max=120
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.keytab=/etc/krb5.keytab
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.log.level=INFO
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.max-cached-nodemanagers-proxies=0
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.scheduler.heartbeat.interval-ms=1000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.trash.checkpoint.interval=0
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.app-checker.class=org.apache.hadoop.yarn.server.sharedcachemanager.RemoteAppChecker
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.local.dir.minspacestart=0
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.staging-dir=/tmp/hadoop-yarn/staging
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nm.liveness-monitor.expiry-interval-ms=600000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.health-monitor.check-interval.ms=1000
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.merge.percent=0.66
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.docker-container-executor.exec-name=/usr/bin/docker
17:14:01.801 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.connect.timeout=20000
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.fs.state-store.retry-interval-ms=1000
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.local-dirs=${hadoop.tmp.dir}/nm-local-dir
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.output.fileoutputformat.compress=false
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.recovery.enabled=false
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.native.lib.available=true
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.store.in-memory.staleness-period-mins=10080
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3.replication=3
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.am.max-attempts=2
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.AbstractFileSystem.har.impl=org.apache.hadoop.fs.HarFs
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.input.buffer.percent=0.0
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.running.map.limit=0
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.webapp.address=${yarn.nodemanager.hostname}:8042
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.group.mapping.ldap.ssl=false
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.defaultFS=file:///
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.multipart.size=104857600
17:14:01.802 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.job.committer.commit-window=10000
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.group.mapping.ldap.search.attr.group.name=cn
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage=90.0
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.container-monitor.procfs-tree.smaps-based-rss.enabled=false
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.sort.spill.percent=0.80
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.crypto.codec.classes.aes.ctr.nopadding=org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec,org.apache.hadoop.crypto.JceAesCtrCryptoCodec
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.groups.negative-cache.secs=30
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.ssl.server.conf=ssl-server.xml
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.ifile.readahead=true
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.nodemanager-client-async.thread-pool-max-size=500
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3native.replication=3
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.admin.address=0.0.0.0:10033
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.health-checker.interval-ms=600000
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3.stream-buffer-size=4096
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ftp.client-write-packet-size=65536
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.DefaultCodec
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.keytab=/etc/krb5.keytab
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.store.in-memory.initial-delay-mins=10
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.webapp.address=0.0.0.0:19888
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.userlog.limit.kb=0
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.task.container.log.backups=0
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.fetch.retry.enabled=${yarn.nodemanager.recovery.enabled}
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.zookeeper.session-timeout.ms=5000
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.connection.ssl.enabled=true
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.webapp.address=0.0.0.0:8788
17:14:01.804 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.log-aggregation.compression-type=none
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.server.max.connections=0
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.work-preserving-recovery.scheduling-wait-ms=10000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.log-dirs=${yarn.log.dir}/userlogs
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.resource.mb=1536
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.container-monitor.interval-ms=3000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.groups.cache.secs=300
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.speculative.minimum-allowed-tasks=10
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.recovery.store.class=org.apache.hadoop.mapreduce.v2.hs.HistoryServerFileSystemStateStoreService
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.combine.progress.records=10000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3.client-write-packet-size=65536
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.instrumentation=org.apache.hadoop.mapred.JobTrackerMetricsInst
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.transfer.buffer.size=131072
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.group.mapping.ldap.directory.search.timeout=10000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.work.around.non.threadsafe.getpwuid=false
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.client-am.ipc.max-retries-on-timeouts=3
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.address=${yarn.nodemanager.hostname}:0
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.taskcontroller=org.apache.hadoop.mapred.DefaultTaskController
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.indexcache.mb=10
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.scheduler.maximum-allocation-vcores=32
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.reduces=1
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.sleep-delay-before-sigkill.ms=250
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.kms.client.encrypted.key.cache.expiry=43200000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.address=${yarn.timeline-service.hostname}:10200
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.configuration.provider-class=org.apache.hadoop.yarn.LocalConfigurationProvider
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.enabled=false
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.zk.session.timeout.ms=60000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: tfile.io.chunk.size=1048576
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.acl-modify-job=
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.automatic.close=true
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.health-monitor.sleep-after-disconnect.ms=1000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.reduce.tasks.maximum=2
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.input.fileinputformat.list-status.num-threads=1
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.authentication.token.validity=36000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3.block.size=67108864
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.local.dir.minspacekill=0
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.failover-controller.graceful-fence.rpc-timeout.ms=5000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.cleaner.interval-ms=86400000
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.intermediate-done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done_intermediate
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.http.address=0.0.0.0:50030
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.recovery.compaction-interval-secs=3600
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.input.buffer.percent=0.70
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.http.policy=HTTP_ONLY
17:14:01.805 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.maxattempts=4
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.groups.cache.warn.after.ms=5000
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.serializations=org.apache.hadoop.io.serializer.WritableSerialization,org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization,org.apache.hadoop.io.serializer.avro.AvroReflectSerialization
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.outofband.heartbeat=false
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.crypto.buffer.size=8192
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.read.timeout=180000
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.cross-origin.allowed-methods=GET,POST,HEAD
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.skip.proc.count.autoincr=true
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.ifile.readahead.bytes=4194304
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.secure=false
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.cluster.local.dir=${hadoop.tmp.dir}/mapred/local
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.report.address=127.0.0.1:0
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.kerberos.kinit.command=kinit
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.http-authentication.type=simple
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.dispatcher.drain-events.timeout=300000
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.log-aggregation.retain-seconds=-1
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.complete.cancel.delegation.tokens=true
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.fail-fast=${yarn.fail-fast}
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.local.clientfactory.class.name=org.apache.hadoop.mapred.LocalClientFactory
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.connection-keep-alive.timeout=5
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.scheduler.minimum-allocation-vcores=1
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.connect.max.retries.on.timeouts=45
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.client.retry-interval-ms=1000
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.client.max-retries=30
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.map.output.collector.class=org.apache.hadoop.mapred.MapTask$MapOutputBuffer
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3n.block.size=67108864
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: nfs.exports.allowed.hosts=* rw
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.max.threads=0
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.health-monitor.connect-retry-interval.ms=1000
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.map.tasks.maximum=2
17:14:01.806 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.file.buffer.size=65536
17:14:01.807 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.container-metrics.unregister-delay-ms=10000
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.mapfile.bloom.size=1048576
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.connect.retry.interval=1000
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.kms.client.authentication.retry-count=1
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.connect.timeout=180000
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.fs.state-store.uri=${hadoop.tmp.dir}/yarn/system/rmstore
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.swift.impl=org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.zk.connection.timeout.ms=15000
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.shuffle.log.backups=0
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.container.log.limit.kb=0
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.resourcemanager.minimum.version=NONE
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ftp.blocksize=67108864
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.address=${yarn.resourcemanager.hostname}:8032
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: file.stream-buffer-size=4096
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.scheduler.monitor.enable=false
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.ubertask.maxreduces=1
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.nodemanager-connect-retries=10
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.nm.uploader.thread-count=20
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.idlethreshold=4000
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.client-server.address=0.0.0.0:8045
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ftp.stream-buffer-size=4096
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.client.job.retry-interval=2000
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.authorization=false
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.authentication.simple.anonymous.allowed=true
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.nodemanager-connect.retry-interval-ms=10000
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.har.impl.disable.cache=true
17:14:01.808 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.am.liveness-monitor.expiry-interval-ms=600000
17:14:01.814 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.linux-container-executor.resources-handler.class=org.apache.hadoop.yarn.server.nodemanager.util.DefaultLCEResourcesHandler
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.leveldb-timeline-store.read-cache-size=104857600
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.authentication=simple
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.files.preserve.failedtasks=false
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.leveldb-timeline-store.path=${hadoop.tmp.dir}/yarn/timeline
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.reduce.slowstart.completedmaps=0.05
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.minicluster.fixed.ports=false
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: file.replication=1
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.ha.automatic-failover.enabled=true
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.userlog.retain.hours=24
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.joblist.cache.size=20000
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.work-preserving-recovery.enabled=true
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.store.class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.mapfile.bloom.error.rate=0.005
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/timeline
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.multipart.purge=false
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.connection.establish.timeout=5000
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.container-executor.class=org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.multipart.purge.age=86400
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.scheduler.client.thread-count=50
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.shuffle.log.separate=true
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.kms.client.encrypted.key.cache.low-watermark=0.3f
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.user.group.static.mapping.overrides=dr.who=;
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.fast.buffer.size=1048576
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.maximum.data.length=67108864
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: tfile.fs.input.buffer.size=262144
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.authentication.type=simple
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.cpu.vcores=1
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.zk-retry-interval-ms=1000
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ftp.bytes-per-checksum=512
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.fallback-to-simple-auth-allowed=false
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.pmem-check-enabled=true
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.remote-app-log-dir=/tmp/logs
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.profile.maps=0-2
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.healthchecker.script.timeout=600000
17:14:01.815 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.ssl.file.buffer.size=65536
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.webapp.https.address=${yarn.timeline-service.hostname}:8190
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.resource.percentage-physical-cpu-limit=100
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.command-opts=-Xmx1024m
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.amlauncher.thread-count=50
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.nm.uploader.replication.factor=10
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.zk.root=/registry
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.jetty.logs.serve.aliases=true
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.failover-proxy-provider=org.apache.hadoop.yarn.client.ConfiguredRMFailoverProxyProvider
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.admin.acl=*
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.reducer.unconditional-preempt.delay.sec=300
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.cross-origin.max-age=1800
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.am.hard-kill-timeout-ms=10000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.remote-app-log-dir-suffix=logs
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.principal=jhs/_HOST@REALM.TLD
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.webapp.address=${yarn.resourcemanager.hostname}:8088
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.recovery.enable=false
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.store.in-memory.check-period-mins=720
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.merge.inmem.threshold=1000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.df.interval=60000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.enabled=false
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.jobhistory.lru.cache.size=5
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.profile=false
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.cross-origin.allowed-headers=X-Requested-With,Content-Type,Accept,Origin
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.hostname=0.0.0.0
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.queuename=default
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.max-age-ms=604800000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.token.tracking.ids.enabled=false
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.localizer.client.thread-count=5
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.uploader.server.thread-count=50
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.move.thread-count=3
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.AbstractFileSystem.hdfs.impl=org.apache.hadoop.fs.Hdfs
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.http-authentication.simple.anonymous.allowed=true
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.filter.initializers=org.apache.hadoop.http.lib.StaticUserWebFilter
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.rpc-timeout.ms=0
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.client-server.thread-count=50
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.resource-tracker.address=${yarn.resourcemanager.hostname}:8031
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.datestring.cache.size=200000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.profile.params=-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: rpc.metrics.quantile.enable=false
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.expire.trackers.interval=600000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.bytes.per.checksum=512
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.timeout=600000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.client.max-retries=3
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.resource.memory-mb=8192
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.disk-health-checker.min-healthy-disks=0.25
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.handler-thread-count=10
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.threads.max=256
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.server.listen.queue.size=128
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.connect.max-wait.ms=900000
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.framework.name=local
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.fileoutputcommitter.algorithm.version=1
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.skip.proc.count.autoincr=true
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.max.split.locations=10
17:14:01.816 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.scheduler.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.system-metrics-publisher.enabled=false
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.nested-level=3
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.connection.timeout=50000
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.connection-keep-alive.enable=false
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.threads.keepalivetime=60
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: file.client-write-packet-size=65536
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.failover-controller.cli-check.rpc-timeout.ms=20000
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ha.zookeeper.acl=world:anyone:rwcda
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: ipc.client.ping=true
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.delayed.delegation-token.removal-interval-ms=30000
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.input.lineinputformat.linespermap=1
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.localizer.fetch.thread-count=4
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.webapp.cross-origin.enabled=false
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.scheduler.address=${yarn.resourcemanager.hostname}:8030
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.leveldb-timeline-store.start-time-write-cache-size=10000
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.health-checker.script.timeout-ms=1200000
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.fs.state-store.num-retries=0
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.ssl.require.client.cert=false
17:14:01.817 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.uid.cache.secs=14400
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.keytab=/etc/security/keytab/jhs.service.keytab
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.ha.automatic-failover.zk-base-path=/yarn-leader-election
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.shuffle.ssl.enabled=false
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.log.level=INFO
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.rm.enabled=false
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.tasktracker.dns.interface=default
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.system-metrics-publisher.dispatcher.pool-size=10
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.speculative.speculative-cap-running-tasks=0.1
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.ha.enabled=false
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.multipart.threshold=2147483647
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.memory.limit.percent=0.25
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.cross-origin.enabled=false
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: io.map.index.skip=0
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.job.hdfs-servers=${fs.defaultFS}
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobtracker.taskscheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.state-store.max-completed-applications=${yarn.resourcemanager.max-completed-applications}
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.map.output.compress=false
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.kms.client.encrypted.key.cache.num.refill.threads=2
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3n.multipart.uploads.block.size=67108864
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.merge.progress.records=10000
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.aux-services.mapreduce_shuffle.class=org.apache.hadoop.mapred.ShuffleHandler
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: map.sort.class=org.apache.hadoop.util.QuickSort
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: tfile.fs.output.buffer.size=262144
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.du.interval=600000
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: fs.s3a.buffer.dir=${hadoop.tmp.dir}/s3a
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.shuffle.retry-delay.max.ms=60000
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.uploader.server.address=0.0.0.0:8046
17:14:01.818 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.client.progressmonitor.pollinterval=1000
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.app.mapreduce.shuffle.log.limit.kb=0
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.move.interval-ms=180000
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: dfs.ha.fencing.ssh.connect-timeout=30000
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.log-aggregation.roll-monitoring-interval-seconds=-1
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.zk.quorum=localhost:2181
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.cross-origin.allowed-origins=*
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.state-store-class=org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.system.acls=sasl:yarn@, sasl:mapred@, sasl:hdfs@
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.crypto.cipher.suite=AES/CTR/NoPadding
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.task.profile.reduce.params=${mapreduce.task.profile.params}
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.timeline-service.generic-application-history.max-applications=10000
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.registry.jaas.context=Client
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.hostname=0.0.0.0
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.reduce.memory.mb=1024
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.group.mapping.ldap.search.filter.group=(objectClass=group)
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.http.authentication.kerberos.principal=HTTP/_HOST@LOCALHOST
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: hadoop.security.group.mapping.ldap.search.filter.user=(&(objectClass=user)(sAMAccountName={0}))
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.disk-health-checker.min-free-space-per-disk-mb=0
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.client.failover-retries-on-socket-timeouts=0
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.client.completion.pollinterval=5000
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.sharedcache.store.class=org.apache.hadoop.yarn.server.sharedcachemanager.store.InMemorySCMStore
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: mapreduce.jobhistory.recovery.store.leveldb.path=${hadoop.tmp.dir}/mapred/history/recoverystore
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: s3.bytes-per-checksum=512
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.windows-container.cpu-limit.enabled=false
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.resourcemanager.webapp.https.address=${yarn.resourcemanager.hostname}:8090
17:14:01.819 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Hadoop and Hive config to Hive Conf: yarn.nodemanager.vmem-pmem-ratio=2.1
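
Note: the long run of "Applying Hadoop and Hive config to Hive Conf" lines above is a single operation: HiveClientImpl iterates over the effective Hadoop Configuration and copies each entry onto the HiveConf used by the isolated Hive client, which is why hundreds of yarn.*, mapreduce.*, and fs.* defaults get logged even for a local run with no cluster. A minimal sketch of that merge, using only public Hadoop/Hive APIs (Configuration is Iterable over its entries, and HiveConf extends it); the loop is an illustration of the behavior being logged, not Spark's exact code:

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;

public class HadoopConfToHiveConf {
    public static void main(String[] args) {
        // new Configuration() loads core-default.xml, core-site.xml, etc. from
        // the classpath, which is where defaults like yarn.resourcemanager.hostname
        // originate.
        Configuration hadoopConf = new Configuration();
        HiveConf hiveConf = new HiveConf();

        // Copy every Hadoop property onto the Hive configuration -- what each
        // "Applying Hadoop and Hive config to Hive Conf: key=value" line records.
        for (Map.Entry<String, String> entry : hadoopConf) {
            hiveConf.set(entry.getKey(), entry.getValue());
        }

        // ${...} placeholders are stored raw and only expanded on get(), which is
        // why values like ${yarn.nodemanager.hostname}:0 appear literally above.
        System.out.println(hiveConf.get("yarn.nodemanager.address"));
    }
}
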
17:14:01.820 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$3 - 1985319428
17:14:01.821 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.mutable.ArrayOps
17:14:01.821 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Tuple2
17:14:01.822 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$3$$anonfun$apply$4 - 1861883919
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: spark.app.name=Java Spark Hive Example
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: spark.sql.catalogImplementation=hive
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: hive.metastore.warehouse.dir=file:/home/aims/workspace/sparkhive/spark-warehouse
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: spark.app.id=local-1487850239737
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: hive.metastore.uris=thrift://localhost:9083
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: spark.master=local[*]
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: spark.executor.id=driver
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: spark.driver.port=44072
17:14:01.822 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying Spark config to Hive Conf: spark.driver.host=10.0.0.3
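
Note: the "Applying Spark config to Hive Conf" entries just above are copied from the driver's SparkConf, and they pin down the run: an in-process local[*] master, a warehouse under /home/aims/workspace/sparkhive, and a remote metastore at thrift://localhost:9083. A driver of roughly this shape would produce those values; this is a reconstruction, not the author's actual source, and hive.metastore.uris would normally come from a hive-site.xml on the classpath rather than from the builder:

import org.apache.spark.sql.SparkSession;

public class JavaSparkHiveExample {
    public static void main(String[] args) {
        SparkSession spark = SparkSession
                .builder()
                .appName("Java Spark Hive Example")   // spark.app.name above
                .master("local[*]")                   // spark.master above
                .config("spark.sql.warehouse.dir",
                        "file:/home/aims/workspace/sparkhive/spark-warehouse")
                .enableHiveSupport()                  // spark.sql.catalogImplementation=hive
                .getOrCreate();

        spark.sql("SHOW DATABASES").show();
        spark.stop();
    }
}
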
17:14:01.823 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$4 - -1410803663
17:14:01.824 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$4$$anonfun$apply$6 - 641245913
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.localize.resource.wait.interval=5000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.spark.client.connect.timeout=1000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.hmshandler.retry.interval=2000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.spark.client.future.timeout=60
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.aggregate.stats.cache.ttl=600
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.aggregate.stats.cache.max.writer.wait=5000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server.read.socket.timeout=10
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.idle.operation.timeout=432000000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.stats.jdbc.timeout=30
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.compactor.cleaner.run.interval=5000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.event.db.listener.timetolive=86400
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.async.exec.keepalive.time=10
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.long.polling.timeout=5000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.querylog.plan.progress.interval=60000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.thrift.http.worker.keepalive.time=60
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.compactor.check.interval=300
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.txn.timeout=300
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.thrift.login.timeout=20
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.event.clean.freq=0
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.client.connect.retry.delay=1
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.session.check.interval=21600000
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.spark.job.monitor.timeout=60
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.event.expiry.duration=0
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.compactor.worker.timeout=86400
17:14:01.825 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.zookeeper.connection.basesleeptime=1000
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.client.socket.lifetime=0
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.async.exec.shutdown.timeout=10
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.thrift.worker.keepalive.time=60
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.thrift.http.cookie.max.age=86400
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.aggregate.stats.cache.max.reader.wait=1000
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.auto.progress.timeout=0
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.thrift.http.max.idle.time=1800000
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.lock.sleep.between.retries=60
17:14:01.826 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.stats.retries.wait=3000
17:14:01.827 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.zookeeper.session.timeout=1200000
17:14:01.827 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.spark.client.server.connect.timeout=90000
17:14:01.827 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.idle.session.timeout=604800000
17:14:01.827 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.metastore.client.socket.timeout=600
17:14:01.827 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Applying extra config to HiveConf: hive.server2.thrift.exponential.backoff.slot.length=100
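
Note: the hive.* values in the "Applying extra config to HiveConf" block above do not come from any *-site.xml; they appear to be the extra defaults Spark supplies for Hive's time-valued ConfVars, rendered as bare numbers so the bundled Hive 1.2 client parses them consistently. If one of them needs changing (say, a busier metastore warrants a longer socket timeout), the spark.hadoop.* prefix is the standard way to push a value into this HiveConf: the prefix is stripped and the key lands in the Hadoop Configuration, which the merge logged earlier carries into the Hive client. A minimal sketch, with a hypothetical app name:

import org.apache.spark.sql.SparkSession;

public class MetastoreTimeoutOverride {
    public static void main(String[] args) {
        // spark.hadoop.* keys have the prefix stripped and are injected into the
        // Hadoop Configuration, which (per the merge logged earlier in this
        // section) flows into the HiveConf handed to the metastore client.
        SparkSession spark = SparkSession
                .builder()
                .master("local[*]")
                .appName("metastore-timeout-override")  // hypothetical name
                .config("spark.hadoop.hive.metastore.client.socket.timeout", "1800")
                .enableHiveSupport()
                .getOrCreate();
        spark.stop();
    }
}
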
17:14:01.829 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.HashSet - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/HashSet.class
17:14:01.831 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Registry - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Registry.class
17:14:01.834 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.parse.SemanticException - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/parse/SemanticException.class
17:14:01.835 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.udf.generic.GenericUDF - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.class
17:14:01.837 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.Closeable - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/Closeable.class
17:14:01.837 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.class
17:14:01.839 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.class
17:14:01.840 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.FunctionInfo - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/FunctionInfo.class
17:14:01.843 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.WindowFunctionInfo - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.class
17:14:01.847 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.class
17:14:01.850 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.regex.PatternSyntaxException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/regex/PatternSyntaxException.class
17:14:01.850 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.udf.generic.GenericUDAFParameterInfo - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFParameterInfo.class
17:14:01.852 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.FunctionRegistry - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/FunctionRegistry.class
17:14:01.856 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.LinkedHashMap - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/LinkedHashMap.class
17:14:01.856 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Collections - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Collections.class
17:14:01.856 [main] DEBUG o.a.h.hive.ql.session.SessionState - SessionState user: null
17:14:01.856 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.LineageState - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/LineageState.class
17:14:01.858 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Operator - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Operator.class
17:14:01.860 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.lib.Node - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/lib/Node.class
17:14:01.862 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.FileSinkOperator - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/FileSinkOperator.class
17:14:01.867 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.TerminalOperator - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/TerminalOperator.class
17:14:01.868 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.hooks.LineageInfo - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/hooks/LineageInfo.class
17:14:01.869 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.ResourceMaps - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/ResourceMaps.class
17:14:01.870 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.DependencyResolver - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/DependencyResolver.class
17:14:01.874 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.SessionState$LogHelper - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/SessionState$LogHelper.class
17:14:01.876 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.lang.StringUtils - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/lang/StringUtils.class
17:14:01.884 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.IndexOutOfBoundsException
17:14:01.884 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.UUID - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/UUID.class
17:14:01.885 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.common.JavaUtils - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/common/JavaUtils.class
17:14:01.887 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.history.HiveHistoryProxyHandler - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/history/HiveHistoryProxyHandler.class
17:14:01.887 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.reflect.InvocationHandler
17:14:01.887 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.reflect.Proxy
17:14:01.887 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Task - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Task.class
17:14:01.889 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.history.HiveHistory$Keys - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/history/HiveHistory$Keys.class
17:14:01.890 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.Counters
17:14:01.894 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.QueryPlan - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/QueryPlan.class
17:14:01.898 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.reflect.UndeclaredThrowableException
17:14:01.949 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.PTFUtils - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/PTFUtils.class
17:14:01.950 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.beans.ExceptionListener - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/beans/ExceptionListener.class
17:14:01.951 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.beans.PersistenceDelegate - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/beans/PersistenceDelegate.class
17:14:01.952 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.PTFUtils$1 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/PTFUtils$1.class
17:14:01.952 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.PTFUtils$2 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/PTFUtils$2.class
17:14:01.953 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.PTFUtils$3 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/PTFUtils$3.class
17:14:01.953 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.PTFUtils$4 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/PTFUtils$4.class
17:14:01.954 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.PTFUtils$5 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/PTFUtils$5.class
17:14:01.954 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$EnumDelegate - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$EnumDelegate.class
17:14:01.955 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.beans.DefaultPersistenceDelegate - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/beans/DefaultPersistenceDelegate.class
17:14:01.956 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.beans.IntrospectionException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/beans/IntrospectionException.class
17:14:01.957 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.beans.Introspector - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/beans/Introspector.class
17:14:01.964 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.TaskBeanInfo - null
17:14:01.965 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.TaskBeanInfo - null
17:14:01.965 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ObjectBeanInfo
17:14:01.967 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.ObjectCustomizer
17:14:01.991 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.TaskCustomizer - null
17:14:01.992 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.TaskCustomizer - null
17:14:01.992 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.MapWork - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/MapWork.class
17:14:01.996 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.BaseWork - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/BaseWork.class
17:14:01.998 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.class
17:14:01.999 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.OperatorDesc - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/OperatorDesc.class
17:14:01.999 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.TaskHandle - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/TaskHandle.class
17:14:01.999 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.api.StageType - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/api/StageType.class
17:14:02.000 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.TEnum - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/TEnum.class
17:14:02.001 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.DriverContext - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/DriverContext.class
17:14:02.029 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.beans.BeanInfo - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/beans/BeanInfo.class
17:14:02.029 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.beans.PropertyDescriptor - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/beans/PropertyDescriptor.class
17:14:02.030 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Queue - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Queue.class
17:14:02.032 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.TException - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/TException.class
17:14:02.033 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.transport.TTransport - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/transport/TTransport.class
17:14:02.036 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.transport.TMemoryBuffer - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/transport/TMemoryBuffer.class
17:14:02.037 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TProtocol - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TProtocol.class
17:14:02.039 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TJSONProtocol - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TJSONProtocol.class
17:14:02.041 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TBinaryProtocol - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TBinaryProtocol.class
17:14:02.046 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.Hive - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/Hive.class
17:14:02.052 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.InvalidOperationException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/InvalidOperationException.class
17:14:02.055 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.TBase - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/TBase.class
17:14:02.056 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Comparable
17:14:02.057 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NoSuchObjectException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.class
17:14:02.058 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.AlreadyExistsException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/AlreadyExistsException.class
17:14:02.060 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.MetaException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/MetaException.class
17:14:02.062 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde2.SerDeException - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde2/SerDeException.class
17:14:02.065 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.HiveMetaHookLoader - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/HiveMetaHookLoader.class
17:14:02.066 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.Hive$1 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/Hive$1.class
17:14:02.066 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.InvalidTableException - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/InvalidTableException.class
17:14:02.069 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.security.UserGroupInformation
17:14:02.070 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.Hive$2 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/Hive$2.class
17:14:02.070 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.class
17:14:02.072 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.IMetaStoreClient - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/IMetaStoreClient.class
17:14:02.075 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.HiveMetaStoreClient - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.class
17:14:02.079 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.RetryingMetaStoreClient - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.class
17:14:02.082 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.MetaStoreUtils - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/MetaStoreUtils.class
17:14:02.086 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy$PartitionIterator - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy$PartitionIterator.class
17:14:02.087 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Runnable
17:14:02.087 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.SocketAddress
17:14:02.087 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.InetSocketAddress
17:14:02.088 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.PathFilter
17:14:02.088 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: com.google.common.base.Function
17:14:02.089 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde.serdeConstants - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde/serdeConstants.class
17:14:02.091 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.MetaStoreUtils$2 - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/MetaStoreUtils$2.class
17:14:02.092 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.MetaStoreUtils$3 - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/MetaStoreUtils$3.class
17:14:02.094 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.UnknownHostException
17:14:02.094 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.InstantiationException
17:14:02.095 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.TApplicationException - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/TApplicationException.class
17:14:02.096 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.IMetaStoreClient$IncompatibleMetastoreException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/IMetaStoreClient$IncompatibleMetastoreException.class
17:14:02.097 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.transport.TTransportException - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/transport/TTransportException.class
17:14:02.097 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.InterruptedException
17:14:02.098 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.transport.TSocket - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/transport/TSocket.class
17:14:02.098 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.transport.TIOStreamTransport - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/transport/TIOStreamTransport.class
17:14:02.099 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.transport.TFramedTransport - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/transport/TFramedTransport.class
17:14:02.100 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TCompactProtocol - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TCompactProtocol.class
17:14:02.104 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Iface - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$Iface.class
17:14:02.106 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: com.facebook.fb303.FacebookService$Iface - jar:file:/home/aims/spark/jars/libfb303-0.9.2.jar!/com/facebook/fb303/FacebookService$Iface.class
17:14:02.109 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Character
17:14:02.109 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.reflect.Constructor
17:14:02.110 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.class
17:14:02.111 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.MetaStoreFilterHook - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.class
17:14:02.112 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.URI
17:14:02.112 [main] INFO hive.metastore - Trying to connect to metastore with URI thrift://localhost:9083
17:14:02.113 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.BufferedInputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/BufferedInputStream.class
17:14:02.113 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.BufferedOutputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/BufferedOutputStream.class
17:14:02.114 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.SocketException
17:14:02.114 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.Socket
17:14:02.115 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TProtocolException - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TProtocolException.class
17:14:02.118 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TStruct - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TStruct.class
17:14:02.120 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$Client.class
17:14:02.128 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: com.facebook.fb303.FacebookService$Client - jar:file:/home/aims/spark/jars/libfb303-0.9.2.jar!/com/facebook/fb303/FacebookService$Client.class
17:14:02.130 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.TServiceClient - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/TServiceClient.class
17:14:02.132 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.InvalidObjectException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/InvalidObjectException.class
17:14:02.134 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.UnknownDBException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/UnknownDBException.class
17:14:02.136 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.UnknownTableException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/UnknownTableException.class
17:14:02.152 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.InvalidInputException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/InvalidInputException.class
17:14:02.156 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ConfigValSecurityException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ConfigValSecurityException.class
17:14:02.157 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.UnknownPartitionException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/UnknownPartitionException.class
17:14:02.158 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.InvalidPartitionException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/InvalidPartitionException.class
17:14:02.161 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NoSuchTxnException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.class
17:14:02.162 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.TxnAbortedException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/TxnAbortedException.class
17:14:02.164 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NoSuchLockException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NoSuchLockException.class
17:14:02.165 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.TxnOpenException - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/TxnOpenException.class
17:14:02.175 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.Utils - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/Utils.class
17:14:02.178 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: javax.security.auth.login.Configuration - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/javax/security/auth/login/Configuration.class
17:14:02.179 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.shims.Utils$JaasConfiguration - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/shims/Utils$JaasConfiguration.class
17:14:02.180 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.security.token.TokenSelector
17:14:02.234 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_args - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_args.class
17:14:02.236 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.TFieldIdEnum - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/TFieldIdEnum.class
17:14:02.236 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.meta_data.FieldValueMetaData - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/meta_data/FieldValueMetaData.class
17:14:02.237 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.meta_data.ListMetaData - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/meta_data/ListMetaData.class
17:14:02.237 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.ObjectInputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/ObjectInputStream.class
17:14:02.237 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.ObjectOutputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/ObjectOutputStream.class
17:14:02.239 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TField - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TField.class
17:14:02.240 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.scheme.StandardScheme - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/scheme/StandardScheme.class
17:14:02.241 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.scheme.IScheme - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/scheme/IScheme.class
17:14:02.241 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_args$set_ugi_argsStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_args$set_ugi_argsStandardSchemeFactory.class
17:14:02.242 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.scheme.SchemeFactory - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/scheme/SchemeFactory.class
17:14:02.243 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.scheme.TupleScheme - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/scheme/TupleScheme.class
17:14:02.243 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_args$set_ugi_argsTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_args$set_ugi_argsTupleSchemeFactory.class
17:14:02.243 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.EnumMap - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/EnumMap.class
17:14:02.245 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_args$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_args$_Fields.class
17:14:02.247 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.EnumSet - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/EnumSet.class
17:14:02.249 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.meta_data.FieldMetaData - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/meta_data/FieldMetaData.class
17:14:02.250 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TMessage - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TMessage.class
17:14:02.251 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_args$set_ugi_argsStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_args$set_ugi_argsStandardScheme.class
17:14:02.252 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TList - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TList.class
17:14:02.254 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_result - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_result.class
17:14:02.256 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_result$set_ugi_resultStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_result$set_ugi_resultStandardSchemeFactory.class
17:14:02.256 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_result$set_ugi_resultTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_result$set_ugi_resultTupleSchemeFactory.class
17:14:02.257 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_result$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_result$_Fields.class
17:14:02.260 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$set_ugi_result$set_ugi_resultStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$set_ugi_result$set_ugi_resultStandardScheme.class
17:14:02.261 [main] INFO hive.metastore - Connected to metastore.
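At this point the client has opened a Thrift connection to the metastore at thrift://localhost:9083 (the value of hive.metastore.uris) and completed the handshake. For reference, a minimal Java driver that would produce this same connection sequence might look like the sketch below; the application name is made up, and the URI matches the "Trying to connect" line above.

import org.apache.spark.sql.SparkSession;

public class MetastoreConnect {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .appName("MetastoreConnect")  // hypothetical name
            .config("hive.metastore.uris", "thrift://localhost:9083")
            .enableHiveSupport()
            .getOrCreate();
        // Any catalog access triggers metastore round-trips like those logged here.
        spark.sql("SHOW DATABASES").show();
        spark.stop();
    }
}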
17:14:02.261 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Index - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Index.class
17:14:02.264 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Database - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Database.class
17:14:02.267 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Function - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Function.class
17:14:02.271 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Table - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Table.class
17:14:02.273 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.PartitionDropOptions - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/PartitionDropOptions.class
17:14:02.274 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Partition - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Partition.class
17:14:02.276 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.PrivilegeBag - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/PrivilegeBag.class
17:14:02.277 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.class
17:14:02.277 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.PartitionEventType - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/PartitionEventType.class
17:14:02.278 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.common.ValidTxnList - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/common/ValidTxnList.class
17:14:02.278 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.OpenTxnsResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.class
17:14:02.280 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.LockResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/LockResponse.class
17:14:02.282 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ShowLocksResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.class
17:14:02.285 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.class
17:14:02.287 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.IMetaStoreClient$NotificationFilter - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/IMetaStoreClient$NotificationFilter.class
17:14:02.287 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NotificationEventResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.class
17:14:02.288 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/CurrentNotificationEventId.class
17:14:02.289 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest.class
17:14:02.291 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.class
17:14:02.292 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FireEventRequest - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FireEventRequest.class
17:14:02.293 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FireEventResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FireEventResponse.class
17:14:02.294 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Role - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Role.class
17:14:02.295 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest.class
17:14:02.296 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.class
17:14:02.297 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.PrincipalType - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/PrincipalType.class
17:14:02.298 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.HiveObjectRef - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/HiveObjectRef.class
17:14:02.299 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.class
17:14:02.301 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ColumnStatistics - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ColumnStatistics.class
17:14:02.302 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.class
17:14:02.304 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.AggrStats - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/AggrStats.class
17:14:02.305 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ShowCompactResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.class
17:14:02.306 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.class
17:14:02.307 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.LockRequest - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/LockRequest.class
17:14:02.308 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.CompactionType - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/CompactionType.class
17:14:02.338 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.meta_data.MapMetaData - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/meta_data/MapMetaData.class
17:14:02.339 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.meta_data.StructMetaData - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/meta_data/StructMetaData.class
17:14:02.340 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.meta_data.EnumMetaData - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/meta_data/EnumMetaData.class
17:14:02.341 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Database$DatabaseStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Database$DatabaseStandardSchemeFactory.class
17:14:02.342 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Database$DatabaseTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Database$DatabaseTupleSchemeFactory.class
17:14:02.342 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Database$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Database$_Fields.class
17:14:02.344 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.Short
17:14:02.345 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Function$FunctionStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Function$FunctionStandardSchemeFactory.class
17:14:02.457 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Function$FunctionTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Function$FunctionTupleSchemeFactory.class
17:14:02.458 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Function$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Function$_Fields.class
17:14:02.459 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FunctionType - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FunctionType.class
17:14:02.460 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ResourceUri - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ResourceUri.class
17:14:02.470 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ColumnStatistics$ColumnStatisticsStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ColumnStatistics$ColumnStatisticsStandardSchemeFactory.class
17:14:02.471 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ColumnStatistics$ColumnStatisticsTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ColumnStatistics$ColumnStatisticsTupleSchemeFactory.class
17:14:02.472 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ColumnStatistics$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ColumnStatistics$_Fields.class
17:14:02.473 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ColumnStatisticsDesc.class
17:14:02.476 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ColumnStatisticsObj.class
17:14:02.479 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FireEventRequest$FireEventRequestStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FireEventRequest$FireEventRequestStandardSchemeFactory.class
17:14:02.481 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FireEventRequest$FireEventRequestTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FireEventRequest$FireEventRequestTupleSchemeFactory.class
17:14:02.481 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FireEventRequest$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FireEventRequest$_Fields.class
17:14:02.483 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FireEventRequestData - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FireEventRequestData.class
17:14:02.483 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.TUnion - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/TUnion.class
17:14:02.485 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.PrivilegeBag$PrivilegeBagStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/PrivilegeBag$PrivilegeBagStandardSchemeFactory.class
17:14:02.485 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.PrivilegeBag$PrivilegeBagTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/PrivilegeBag$PrivilegeBagTupleSchemeFactory.class
17:14:02.486 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.PrivilegeBag$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/PrivilegeBag$_Fields.class
17:14:02.486 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.class
17:14:02.489 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest$GetRoleGrantsForPrincipalRequestStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest$GetRoleGrantsForPrincipalRequestStandardSchemeFactory.class
17:14:02.489 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest$GetRoleGrantsForPrincipalRequestTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest$GetRoleGrantsForPrincipalRequestTupleSchemeFactory.class
17:14:02.490 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest$_Fields.class
17:14:02.495 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Table$TableStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Table$TableStandardSchemeFactory.class
17:14:02.496 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Table$TableTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Table$TableTupleSchemeFactory.class
17:14:02.497 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Table$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Table$_Fields.class
17:14:02.498 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.StorageDescriptor - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/StorageDescriptor.class
17:14:02.500 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FieldSchema - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FieldSchema.class
17:14:02.503 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Index$IndexStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Index$IndexStandardSchemeFactory.class
17:14:02.504 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Index$IndexTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Index$IndexTupleSchemeFactory.class
17:14:02.504 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Index$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Index$_Fields.class
17:14:02.506 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.HiveObjectRef$HiveObjectRefStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/HiveObjectRef$HiveObjectRefStandardSchemeFactory.class
17:14:02.506 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.HiveObjectRef$HiveObjectRefTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/HiveObjectRef$HiveObjectRefTupleSchemeFactory.class
17:14:02.506 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.HiveObjectRef$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/HiveObjectRef$_Fields.class
17:14:02.507 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.HiveObjectType - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/HiveObjectType.class
17:14:02.509 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Partition$PartitionStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Partition$PartitionStandardSchemeFactory.class
17:14:02.509 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Partition$PartitionTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Partition$PartitionTupleSchemeFactory.class
17:14:02.510 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Partition$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Partition$_Fields.class
17:14:02.512 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest$GetPrincipalsInRoleRequestStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest$GetPrincipalsInRoleRequestStandardSchemeFactory.class
17:14:02.513 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest$GetPrincipalsInRoleRequestTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest$GetPrincipalsInRoleRequestTupleSchemeFactory.class
17:14:02.513 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest$_Fields.class
17:14:02.514 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.LockRequest$LockRequestStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/LockRequest$LockRequestStandardSchemeFactory.class
17:14:02.515 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.LockRequest$LockRequestTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/LockRequest$LockRequestTupleSchemeFactory.class
17:14:02.515 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.LockRequest$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/LockRequest$_Fields.class
17:14:02.516 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.LockComponent - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/LockComponent.class
17:14:02.518 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Role$RoleStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Role$RoleStandardSchemeFactory.class
17:14:02.518 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Role$RoleTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Role$RoleTupleSchemeFactory.class
17:14:02.519 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Role$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Role$_Fields.class
17:14:02.520 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest$SetPartitionsStatsRequestStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest$SetPartitionsStatsRequestStandardSchemeFactory.class
17:14:02.521 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest$SetPartitionsStatsRequestTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest$SetPartitionsStatsRequestTupleSchemeFactory.class
17:14:02.521 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest$_Fields.class
17:14:02.523 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_args - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_args.class
17:14:02.524 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_args$get_all_databases_argsStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_args$get_all_databases_argsStandardSchemeFactory.class
17:14:02.525 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_args$get_all_databases_argsTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_args$get_all_databases_argsTupleSchemeFactory.class
17:14:02.525 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_args$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_args$_Fields.class
17:14:02.526 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_args$get_all_databases_argsStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_args$get_all_databases_argsStandardScheme.class
17:14:02.527 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_result - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_result.class
17:14:02.529 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_result$get_all_databases_resultStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_result$get_all_databases_resultStandardSchemeFactory.class
17:14:02.530 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_result$get_all_databases_resultTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_result$get_all_databases_resultTupleSchemeFactory.class
17:14:02.530 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_result$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_result$_Fields.class
17:14:02.579 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_all_databases_result$get_all_databases_resultStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_all_databases_result$get_all_databases_resultStandardScheme.class
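The set_ugi_args/set_ugi_result classes loaded earlier and the get_all_databases_args/get_all_databases_result classes here correspond to the first RPCs the client issues after connecting: it reports the caller's user/group information, then lists databases (the get_functions classes that follow are loaded when Spark fetches permanent UDFs). Stripped of the RetryingMetaStoreClient wrapper seen above, the same call over raw Thrift would look roughly like this; direct use of the generated client is shown only to make the wire protocol concrete.

import java.util.List;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;

public class RawMetastoreCall {
    public static void main(String[] args) throws Exception {
        TSocket transport = new TSocket("localhost", 9083);
        transport.open();
        ThriftHiveMetastore.Client client =
            new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));
        List<String> databases = client.get_all_databases();  // the RPC whose arg/result classes load above
        System.out.println(databases);
        transport.close();
    }
}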
17:14:02.581 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_args - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_args.class
17:14:02.583 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_args$get_functions_argsStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_args$get_functions_argsStandardSchemeFactory.class
17:14:02.584 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_args$get_functions_argsTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_args$get_functions_argsTupleSchemeFactory.class
17:14:02.584 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_args$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_args$_Fields.class
17:14:02.585 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_args$get_functions_argsStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_args$get_functions_argsStandardScheme.class
17:14:02.585 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_result - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_result.class
17:14:02.588 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_result$get_functions_resultStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_result$get_functions_resultStandardSchemeFactory.class
17:14:02.588 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_result$get_functions_resultTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_result$get_functions_resultTupleSchemeFactory.class
17:14:02.589 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_result$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_result$_Fields.class
17:14:02.590 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_functions_result$get_functions_resultStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_functions_result$get_functions_resultStandardScheme.class
17:14:02.612 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.viewfs.ViewFileSystem
17:14:02.618 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.ftp.FTPFileSystem
17:14:02.622 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.HarFileSystem
17:14:02.627 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.hdfs.DistributedFileSystem
17:14:02.633 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.hdfs.web.HftpFileSystem
17:14:02.643 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.hdfs.web.HsftpFileSystem
17:14:02.643 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.hdfs.web.WebHdfsFileSystem
17:14:02.856 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.hdfs.web.SWebHdfsFileSystem
17:14:02.901 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.JobConfigurable
17:14:02.906 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.permission.FsPermission
17:14:02.910 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.FileStatus
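
The three prefixes in the IsolatedClientLoader lines above mean: "shared class" is served from the application classloader (Hadoop/Spark types that must be common to both sides), "hive class" is served from the isolated Hive 1.2.1 client jars, and "custom defining" means Spark's own Hive-client glue is redefined inside the isolated loader. The routing is driven by a handful of session options; the sketch below uses the real Spark 2.x option keys but purely illustrative values, since this paste never shows how the session was actually configured:

import org.apache.spark.sql.SparkSession;

public class HiveClientConfigSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("hive-client-config-sketch")   // illustrative name
                // Metastore client version instantiated inside the isolated loader.
                .config("spark.sql.hive.metastore.version", "1.2.1")
                // "builtin" = use the hive-*-1.2.1.spark2 jars Spark ships with.
                .config("spark.sql.hive.metastore.jars", "builtin")
                // Class-name prefixes forced onto the shared (application) loader,
                // e.g. a JDBC driver that the metastore connection needs.
                .config("spark.sql.hive.metastore.sharedPrefixes", "com.mysql.jdbc")
                .enableHiveSupport()
                .getOrCreate();
        spark.stop();
    }
}
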
17:14:02.918 [main] DEBUG o.a.h.hive.ql.session.SessionState - HDFS root scratch dir: /tmp/hive with schema null, permission: rwx-wx-wx
17:14:02.930 [main] INFO o.a.h.hive.ql.session.SessionState - Created local directory: /tmp/25ef1cac-8805-4eda-85d2-a0f0727ee703_resources
17:14:02.934 [main] INFO o.a.h.hive.ql.session.SessionState - Created HDFS directory: /tmp/hive/aims/25ef1cac-8805-4eda-85d2-a0f0727ee703
17:14:02.940 [main] INFO o.a.h.hive.ql.session.SessionState - Created local directory: /tmp/aims/25ef1cac-8805-4eda-85d2-a0f0727ee703
17:14:02.946 [main] INFO o.a.h.hive.ql.session.SessionState - Created HDFS directory: /tmp/hive/aims/25ef1cac-8805-4eda-85d2-a0f0727ee703/_tmp_space.db
17:14:02.949 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.io.PrintStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/io/PrintStream.class
17:14:02.949 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$5 - 1933440993
17:14:02.949 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.mutable.StringBuilder
17:14:02.950 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.immutable.Nil$
17:14:02.950 [main] INFO o.a.s.sql.hive.client.HiveClientImpl - Warehouse location for Hive client (version 1.2.1) is file:/home/aims/workspace/sparkhive/spark-warehouse
17:14:02.950 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.reflect.ClassTag$
17:14:02.950 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.net.URL
17:14:02.950 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.BoxesRunTime
17:14:02.954 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getDatabaseOption$1 - 1969982682
17:14:02.955 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1 - 422934569
17:14:02.955 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.util.MutableURLClassLoader
17:14:02.955 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.IntRef
17:14:02.955 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.runtime.ObjectRef
17:14:02.956 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.class
17:14:02.957 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.conf.Configurable
17:14:02.958 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.class
17:14:02.959 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.lockmgr.LockException - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/lockmgr/LockException.class
17:14:02.959 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.SessionState$AuthorizationMode - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/SessionState$AuthorizationMode.class
17:14:02.960 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.class
17:14:02.960 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.class
17:14:02.961 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.SessionState$ResourceType - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/SessionState$ResourceType.class
17:14:02.962 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.HiveOperation - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/HiveOperation.class
17:14:02.963 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.session.CreateTableAutomaticGrant - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.class
17:14:02.964 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.log.PerfLogger - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/log/PerfLogger.class
17:14:02.965 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.tez.TezSessionState - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.class
17:14:02.967 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.spark.session.SparkSession - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/spark/session/SparkSession.class
17:14:02.968 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.sql.Timestamp - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/sql/Timestamp.class
17:14:02.968 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.reflect.Modifier
17:14:02.969 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim$$anonfun$findStaticMethod$1 - 2019787164
17:14:02.969 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.Option$
17:14:02.969 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_args - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_args.class
17:14:02.971 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_args$get_database_argsStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_args$get_database_argsStandardSchemeFactory.class
17:14:02.972 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_args$get_database_argsTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_args$get_database_argsTupleSchemeFactory.class
17:14:02.973 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_args$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_args$_Fields.class
17:14:02.974 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_args$get_database_argsStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_args$get_database_argsStandardScheme.class
17:14:02.975 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_result - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_result.class
17:14:02.977 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_result$get_database_resultStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_result$get_database_resultStandardSchemeFactory.class
17:14:02.978 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_result$get_database_resultTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_result$get_database_resultTupleSchemeFactory.class
17:14:02.979 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_result$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_result$_Fields.class
17:14:02.999 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_database_result$get_database_resultStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_database_result$get_database_resultStandardScheme.class
17:14:02.999 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Database$DatabaseStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Database$DatabaseStandardScheme.class
17:14:03.000 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.protocol.TMap - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/protocol/TMap.class
17:14:03.002 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getDatabaseOption$1$$anonfun$apply$7 - 1444841521
17:14:03.002 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.catalog.CatalogDatabase
17:14:03.003 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getDatabaseOption$1$$anonfun$apply$7$$anonfun$apply$8 - -1571805055
17:14:03.003 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.TraversableOnce
17:14:03.004 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.metrics.source.HiveCatalogMetrics$
17:14:03.011 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NoSuchObjectException$NoSuchObjectExceptionStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NoSuchObjectException$NoSuchObjectExceptionStandardSchemeFactory.class
17:14:03.011 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NoSuchObjectException$NoSuchObjectExceptionTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NoSuchObjectException$NoSuchObjectExceptionTupleSchemeFactory.class
17:14:03.012 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NoSuchObjectException$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NoSuchObjectException$_Fields.class
17:14:03.013 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.NoSuchObjectException$NoSuchObjectExceptionStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/NoSuchObjectException$NoSuchObjectExceptionStandardScheme.class
17:14:03.131 [main] INFO o.a.s.sql.execution.SparkSqlParser - Parsing command: select survey_response_value from health
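
Here the application's query reaches the SQL parser. The source of queryhive.java is not part of this paste; a minimal driver consistent with the log (the warehouse location logged above, the table and column names, and the "show at queryhive.java:42" frame in the stack trace further down) would look roughly like this, as a reconstruction rather than the actual code:

package spark.sparkhive;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class queryhive {
    public static void main(String[] args) {
        // Warehouse path taken from the "Warehouse location" log line above.
        SparkSession spark = SparkSession.builder()
                .appName("queryhive")
                .config("spark.sql.warehouse.dir",
                        "file:/home/aims/workspace/sparkhive/spark-warehouse")
                .enableHiveSupport()
                .getOrCreate();
        Dataset<Row> result = spark.sql("select survey_response_value from health");
        result.show();   // triggers the job -- and the crash logged below
        spark.stop();
    }
}
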
17:14:04.310 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1 - -897634389
17:14:04.311 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$9 - -547694562
17:14:04.311 [main] DEBUG o.a.s.sql.hive.client.HiveClientImpl - Looking up default.health
17:14:04.312 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_args - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_args.class
17:14:04.315 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_args$get_table_argsStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_args$get_table_argsStandardSchemeFactory.class
17:14:04.315 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_args$get_table_argsTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_args$get_table_argsTupleSchemeFactory.class
17:14:04.316 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_args$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_args$_Fields.class
17:14:04.317 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_args$get_table_argsStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_args$get_table_argsStandardScheme.class
17:14:04.318 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_result - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_result.class
17:14:04.321 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_result$get_table_resultStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_result$get_table_resultStandardSchemeFactory.class
17:14:04.323 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_result$get_table_resultTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_result$get_table_resultTupleSchemeFactory.class
17:14:04.324 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_result$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_result$_Fields.class
17:14:04.432 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_result$get_table_resultStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore$get_table_result$get_table_resultStandardScheme.class
17:14:04.433 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Table$TableStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Table$TableStandardScheme.class
17:14:04.434 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.thrift.EncodingUtils - jar:file:/home/aims/spark/jars/libthrift-0.9.2.jar!/org/apache/thrift/EncodingUtils.class
17:14:04.436 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.StorageDescriptor$StorageDescriptorStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/StorageDescriptor$StorageDescriptorStandardSchemeFactory.class
17:14:04.437 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.StorageDescriptor$StorageDescriptorTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/StorageDescriptor$StorageDescriptorTupleSchemeFactory.class
17:14:04.438 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.StorageDescriptor$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/StorageDescriptor$_Fields.class
17:14:04.439 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SerDeInfo - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SerDeInfo.class
17:14:04.440 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.Order - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/Order.class
17:14:04.441 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SkewedInfo - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SkewedInfo.class
17:14:04.443 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.StorageDescriptor$StorageDescriptorStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/StorageDescriptor$StorageDescriptorStandardScheme.class
17:14:04.444 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FieldSchema$FieldSchemaStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FieldSchema$FieldSchemaStandardSchemeFactory.class
17:14:04.445 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FieldSchema$FieldSchemaTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FieldSchema$FieldSchemaTupleSchemeFactory.class
17:14:04.445 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FieldSchema$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FieldSchema$_Fields.class
17:14:04.446 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.FieldSchema$FieldSchemaStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/FieldSchema$FieldSchemaStandardScheme.class
17:14:04.449 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SerDeInfo$SerDeInfoStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SerDeInfo$SerDeInfoStandardSchemeFactory.class
17:14:04.450 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SerDeInfo$SerDeInfoTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SerDeInfo$SerDeInfoTupleSchemeFactory.class
17:14:04.450 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SerDeInfo$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SerDeInfo$_Fields.class
17:14:04.452 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SerDeInfo$SerDeInfoStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SerDeInfo$SerDeInfoStandardScheme.class
17:14:04.454 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SkewedInfo$SkewedInfoStandardSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SkewedInfo$SkewedInfoStandardSchemeFactory.class
17:14:04.455 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SkewedInfo$SkewedInfoTupleSchemeFactory - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SkewedInfo$SkewedInfoTupleSchemeFactory.class
17:14:04.456 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SkewedInfo$_Fields - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SkewedInfo$_Fields.class
17:14:04.458 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.api.SkewedInfo$SkewedInfoStandardScheme - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/api/SkewedInfo$SkewedInfoStandardScheme.class
17:14:04.460 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.TableType - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/TableType.class
17:14:04.461 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.class
17:14:04.463 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde2.AbstractSerDe - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde2/AbstractSerDe.class
17:14:04.463 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde2.SerDe - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde2/SerDe.class
17:14:04.464 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde2.Deserializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde2/Deserializer.class
17:14:04.465 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde2.Serializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde2/Serializer.class
17:14:04.465 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.Table - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/Table.class
17:14:04.469 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.Table$ValidationFailureSemanticException - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/Table$ValidationFailureSemanticException.class
17:14:04.472 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10 - -14254603
17:14:04.472 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.GenTraversableOnce
17:14:04.473 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.TraversableLike
17:14:04.474 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$6 - 1448220211
17:14:04.474 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.mutable.Buffer$
17:14:04.474 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.mutable.Buffer
17:14:04.474 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.types.StructType$
17:14:04.478 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.commons.lang3.StringUtils - jar:file:/home/aims/SentimentAnalysis/target/sentiment-analysis-1.0.jar!/org/apache/commons/lang3/StringUtils.class
17:14:04.486 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$7 - 552463382
17:14:04.487 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.parser.CatalystSqlParser$
17:14:04.489 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: int
17:14:04.495 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.types.MetadataBuilder
17:14:04.496 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.hive.HiveUtils$
17:14:04.497 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.types.StructField
17:14:04.500 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$fromHiveColumn$1 - -1037424356
17:14:04.501 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$fromHiveColumn$2 - -959351106
17:14:04.501 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.502 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.502 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.502 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: date
17:14:04.503 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: date
17:14:04.504 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: int
17:14:04.504 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.504 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.505 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.505 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: int
17:14:04.505 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.505 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: int
17:14:04.506 [main] INFO o.a.s.s.c.parser.CatalystSqlParser - Parsing command: string
17:14:04.506 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.mutable.BufferLike
17:14:04.509 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.mutable.ArrayBuffer$
17:14:04.509 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: scala.collection.mutable.ArrayBuffer
17:14:04.510 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$8 - 716594440
17:14:04.511 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.TableIdentifier
17:14:04.511 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.catalog.CatalogTableType$
17:14:04.515 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$9 - 302429093
17:14:04.516 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.catalog.CatalogStorageFormat
17:14:04.519 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.metadata.HiveStorageHandler - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.class
17:14:04.520 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.metastore.ProtectMode - jar:file:/home/aims/spark/jars/hive-metastore-1.2.1.spark2.jar!/org/apache/hadoop/hive/metastore/ProtectMode.class
17:14:04.521 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer$TableSpec - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer$TableSpec.class
17:14:04.523 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.serde2.objectinspector.StructField - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/serde2/objectinspector/StructField.class
17:14:04.525 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.Shim_v0_12$$anonfun$getDataLocation$1 - 1384380516
17:14:04.526 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities.class
17:14:04.532 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hive.com.esotericsoftware.kryo.Serializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hive/com/esotericsoftware/kryo/Serializer.class
17:14:04.533 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hive.com.esotericsoftware.kryo.serializers.FieldSerializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hive/com/esotericsoftware/kryo/serializers/FieldSerializer.class
17:14:04.537 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.HiveOutputFormat - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/HiveOutputFormat.class
17:14:04.537 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.OutputFormat
17:14:04.540 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.security.NoSuchAlgorithmException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/security/NoSuchAlgorithmException.class
17:14:04.541 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.concurrent.ExecutionException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/concurrent/ExecutionException.class
17:14:04.541 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.concurrent.BlockingQueue - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/concurrent/BlockingQueue.class
17:14:04.541 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.common.HiveInterruptCallback - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/common/HiveInterruptCallback.class
17:14:04.547 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.ReduceWork - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/ReduceWork.class
17:14:04.550 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.zip.DeflaterOutputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/zip/DeflaterOutputStream.class
17:14:04.550 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.FSDataOutputStream
17:14:04.551 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.zip.InflaterInputStream - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/zip/InflaterInputStream.class
17:14:04.551 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.fs.FSDataInputStream
17:14:04.552 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$DatePersistenceDelegate - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$DatePersistenceDelegate.class
17:14:04.554 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$TimestampPersistenceDelegate - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$TimestampPersistenceDelegate.class
17:14:04.555 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$MapDelegate - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$MapDelegate.class
17:14:04.558 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$ListDelegate - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$ListDelegate.class
17:14:04.559 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$CommonTokenDelegate - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$CommonTokenDelegate.class
17:14:04.561 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$PathDelegate - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$PathDelegate.class
17:14:04.562 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.io.compress.CompressionOutputStream
17:14:04.569 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.sql.SQLTransientException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/sql/SQLTransientException.class
17:14:04.574 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.sql.SQLException - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/sql/SQLException.class
17:14:04.575 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.io.Writable
17:14:04.576 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$1 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$1.class
17:14:04.577 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$3 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$3.class
17:14:04.578 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$4 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$4.class
17:14:04.580 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$5 - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$5.class
17:14:04.581 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$ReduceField - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$ReduceField.class
17:14:04.588 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$SqlDateSerializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$SqlDateSerializer.class
17:14:04.590 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$TimestampSerializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$TimestampSerializer.class
17:14:04.591 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$PathSerializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$PathSerializer.class
17:14:04.593 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.objenesis.strategy.InstantiatorStrategy - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/objenesis/strategy/InstantiatorStrategy.class
17:14:04.598 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.Utilities$CommonTokenSerializer - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/Utilities$CommonTokenSerializer.class
17:14:04.599 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.PlanUtils - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/PlanUtils.class
17:14:04.604 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: java.lang.AbstractMethodError
17:14:04.607 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.plan.TableDesc - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/plan/TableDesc.class
17:14:04.609 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.TextInputFormat
17:14:04.614 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/IgnoreKeyTextOutputFormat.class
17:14:04.615 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.TextOutputFormat
17:14:04.617 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.HiveFileFormatUtils - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.class
17:14:04.619 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.util.Progressable
17:14:04.620 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.class
17:14:04.622 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.SequenceFileOutputFormat
17:14:04.624 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.class
17:14:04.624 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.SequenceFileInputFormat
17:14:04.626 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.SequenceFileInputFormatChecker - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/SequenceFileInputFormatChecker.class
17:14:04.627 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.InputFormatChecker - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/InputFormatChecker.class
17:14:04.628 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.io.RCFileInputFormat - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/io/RCFileInputFormat.class
17:14:04.629 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.FileInputFormat
17:14:04.629 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: java.util.Random - jar:file:/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/rt.jar!/java/util/Random.class
17:14:04.641 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$10 - -1321183718
17:14:04.644 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.hadoop.mapred.RecordWriter
17:14:04.644 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - hive class: org.apache.hadoop.hive.ql.exec.FileSinkOperator$RecordWriter - jar:file:/home/aims/spark/jars/hive-exec-1.2.1.spark2.jar!/org/apache/hadoop/hive/ql/exec/FileSinkOperator$RecordWriter.class
17:14:04.645 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$11 - -1054124338
17:14:04.646 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$12 - 1906974194
17:14:04.648 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - custom defining: org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$13 - 1350469868
17:14:04.649 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.catalog.CatalogTable$
17:14:04.655 [main] DEBUG o.a.s.s.h.c.IsolatedClientLoader - shared class: org.apache.spark.sql.catalyst.catalog.CatalogTable
17:14:04.780 [main] DEBUG o.a.hadoop.hive.ql.exec.Utilities - Hive Conf not found or Session not initiated, use thread based class loader instead
17:14:04.781 [main] DEBUG o.a.hadoop.hive.ql.exec.Utilities - Hive Conf not found or Session not initiated, use thread based class loader instead
17:14:04.791 [main] DEBUG hive.log - DDL: struct health { i32 srno, string survey_question_text, string survey_question, string survey_response_sentiment, date survey_received_date, date discharge_date, i32 discharge_service_key, string discharge_service, string nursing_unit, string sex, i32 age, string survey_response_value, i32 discharge_month_key, string mrn}
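
The hive.log entry above is the metastore's Thrift-style rendering of the table schema (i32 is Hive INT). A CREATE TABLE equivalent, reconstructed from this struct plus the LazySimpleSerDe defaults logged further down (plain-text storage, \N as the null string), would be approximately the following; the real DDL is not in this paste:

import org.apache.spark.sql.SparkSession;

public class CreateHealthTableSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("create-health-table-sketch")
                .enableHiveSupport()
                .getOrCreate();
        // Reconstruction: column list mirrors the struct in the log line above.
        spark.sql("CREATE TABLE IF NOT EXISTS health ("
                + " srno INT,"
                + " survey_question_text STRING,"
                + " survey_question STRING,"
                + " survey_response_sentiment STRING,"
                + " survey_received_date DATE,"
                + " discharge_date DATE,"
                + " discharge_service_key INT,"
                + " discharge_service STRING,"
                + " nursing_unit STRING,"
                + " sex STRING,"
                + " age INT,"
                + " survey_response_value STRING,"
                + " discharge_month_key INT,"
                + " mrn STRING)"
                + " ROW FORMAT DELIMITED STORED AS TEXTFILE");
        spark.stop();
    }
}
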
17:14:04.912 [main] DEBUG o.a.s.s.c.a.Analyzer$ResolveReferences - Resolving 'survey_response_value to survey_response_value#11
17:14:05.052 [main] DEBUG o.a.s.s.h.HiveSessionState$$anon$1 -
=== Result of Batch Resolution ===
Before:
'Project ['survey_response_value]
+- 'UnresolvedRelation `health`
After:
Project [survey_response_value#11]
+- MetastoreRelation default, health

17:14:06.980 [main] DEBUG o.a.s.s.h.HiveSessionState$$anon$1 -
=== Result of Batch Resolution ===
Before:
'DeserializeToObject unresolveddeserializer(createexternalrow(getcolumnbyordinal(0, StringType).toString, StructField(survey_response_value,StringType,true))), obj#15: org.apache.spark.sql.Row
+- LocalRelation <empty>, [survey_response_value#11]
After:
DeserializeToObject createexternalrow(survey_response_value#11.toString, StructField(survey_response_value,StringType,true)), obj#15: org.apache.spark.sql.Row
+- LocalRelation <empty>, [survey_response_value#11]

17:14:06.998 [main] DEBUG o.a.s.s.h.HiveSessionState$$anon$1 -
=== Result of Batch Resolution ===
Before:
'DeserializeToObject unresolveddeserializer(createexternalrow(getcolumnbyordinal(0, StringType).toString, StructField(survey_response_value,StringType,true))), obj#17: org.apache.spark.sql.Row
+- LocalRelation <empty>, [survey_response_value#11]
After:
DeserializeToObject createexternalrow(survey_response_value#11.toString, StructField(survey_response_value,StringType,true)), obj#17: org.apache.spark.sql.Row
+- LocalRelation <empty>, [survey_response_value#11]

17:14:07.015 [main] DEBUG o.a.s.s.h.HiveSessionState$$anon$1 -
=== Result of Batch Resolution ===
Before:
'DeserializeToObject unresolveddeserializer(createexternalrow(getcolumnbyordinal(0, StringType).toString, StructField(survey_response_value,StringType,true))), obj#18: org.apache.spark.sql.Row
+- LocalRelation <empty>, [survey_response_value#11]
After:
DeserializeToObject createexternalrow(survey_response_value#11.toString, StructField(survey_response_value,StringType,true)), obj#18: org.apache.spark.sql.Row
+- LocalRelation <empty>, [survey_response_value#11]

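The three DeserializeToObject batches above are the analyzer preparing the row deserializer for show(); the unresolved 'survey_response_value and bare `health` relation from the first batch have already been bound to survey_response_value#11 and MetastoreRelation default, health. The same analyzed plans can be printed on demand, which is useful when resolution fails; a small sketch against the public Dataset API (class name is illustrative):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class ExplainSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("explain-sketch")
                .enableHiveSupport()
                .getOrCreate();
        Dataset<Row> df = spark.sql("select survey_response_value from health");
        // Prints the parsed, analyzed, optimized and physical plans; the analyzed
        // plan matches the "After" side of the batch-resolution dumps above.
        df.explain(true);
        spark.stop();
    }
}
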
17:14:07.269 [main] DEBUG o.a.hadoop.hive.ql.exec.Utilities - Hive Conf not found or Session not initiated, use thread based class loader instead
17:14:07.582 [main] DEBUG o.a.h.h.serde2.lazy.LazySimpleSerDe - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe initialized with: columnNames=[srno, survey_question_text, survey_question, survey_response_sentiment, survey_received_date, discharge_date, discharge_service_key, discharge_service, nursing_unit, sex, age, survey_response_value, discharge_month_key, mrn] columnTypes=[int, string, string, string, date, date, int, string, string, string, int, string, int, string] separator=[[B@1a6dc5ea] nullstring=\N lastColumnTakesRest=false timestampFormats=null
17:14:07.896 [main] INFO o.a.spark.storage.memory.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 284.3 KB, free 335.1 MB)
17:14:07.900 [main] DEBUG o.apache.spark.storage.BlockManager - Put block broadcast_0 locally took 259 ms
17:14:07.904 [main] DEBUG o.apache.spark.storage.BlockManager - Putting block broadcast_0 without replication took 263 ms
17:14:08.019 [main] INFO o.a.spark.storage.memory.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 23.5 KB, free 335.1 MB)
17:14:08.023 [dispatcher-event-loop-0] INFO o.a.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on 10.0.0.3:44157 (size: 23.5 KB, free: 335.4 MB)
17:14:08.024 [main] DEBUG o.a.spark.storage.BlockManagerMaster - Updated info of block broadcast_0_piece0
17:14:08.027 [main] DEBUG o.apache.spark.storage.BlockManager - Told master about block broadcast_0_piece0
17:14:08.029 [main] DEBUG o.apache.spark.storage.BlockManager - Put block broadcast_0_piece0 locally took 13 ms
17:14:08.029 [main] DEBUG o.apache.spark.storage.BlockManager - Putting block broadcast_0_piece0 without replication took 13 ms
17:14:08.045 [main] INFO org.apache.spark.SparkContext - Created broadcast 0 from show at queryhive.java:42
Exception in thread "main" java.lang.ExceptionInInitializerError
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
    at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:225)
    at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:308)
    at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:38)
    at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2371)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
    at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2765)
    at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2370)
    at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2377)
    at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2113)
    at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2112)
    at org.apache.spark.sql.Dataset.withTypedCallback(Dataset.scala:2795)
    at org.apache.spark.sql.Dataset.head(Dataset.scala:2112)
    at org.apache.spark.sql.Dataset.take(Dataset.scala:2327)
    at org.apache.spark.sql.Dataset.showString(Dataset.scala:248)
    at org.apache.spark.sql.Dataset.show(Dataset.scala:636)
    at org.apache.spark.sql.Dataset.show(Dataset.scala:595)
    at org.apache.spark.sql.Dataset.show(Dataset.scala:604)
    at spark.sparkhive.queryhive.main(queryhive.java:42)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
Caused by: com.fasterxml.jackson.databind.JsonMappingException: Jackson version is too old 2.5.1
    at com.fasterxml.jackson.module.scala.JacksonModule$class.setupModule(JacksonModule.scala:56)
    at com.fasterxml.jackson.module.scala.DefaultScalaModule.setupModule(DefaultScalaModule.scala:19)
    at com.fasterxml.jackson.databind.ObjectMapper.registerModule(ObjectMapper.java:651)
    at org.apache.spark.rdd.RDDOperationScope$.<init>(RDDOperationScope.scala:82)
    at org.apache.spark.rdd.RDDOperationScope$.<clinit>(RDDOperationScope.scala)
    ... 25 more
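
The crash is unrelated to Hive: the static initializer of RDDOperationScope$ registers jackson-module-scala on an ObjectMapper, and that module refuses to run against jackson-databind 2.5.1, which is older than the 2.6-level Jackson this Spark build expects. An old jackson-databind is therefore shadowing Spark's on the classpath; the most likely carrier is the application's fat jar, given that org.apache.commons.lang3.StringUtils was also served from sentiment-analysis-1.0.jar earlier in this log. Typical fixes are aligning the pom's jackson-databind/jackson-module-scala versions with the ones Spark ships, marking them provided, or shading/relocating them. The probe below (hypothetical class name, real Jackson APIs) confirms which jackson-databind the driver actually loads and from which jar:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.cfg.PackageVersion;

public class JacksonProbe {
    public static void main(String[] args) {
        // Reported as 2.5.1 in this log; Spark's jackson-module-scala needs 2.6+.
        System.out.println("jackson-databind version: " + PackageVersion.VERSION);
        // The jar this class was loaded from -- the dependency to evict or upgrade.
        System.out.println("loaded from: "
                + ObjectMapper.class.getProtectionDomain().getCodeSource().getLocation());
    }
}
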
17:14:08.212 [Thread-2] INFO org.apache.spark.SparkContext - Invoking stop() from shutdown hook
17:14:08.215 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.server.Server@514eedd8
17:14:08.218 [Thread-2] DEBUG o.spark_project.jetty.server.Server - Graceful shutdown org.spark_project.jetty.server.Server@514eedd8 by
17:14:08.218 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040}
17:14:08.218 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.server.ServerConnector$ServerConnectorManager@1df98368
17:14:08.218 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=0 selected=0
17:14:08.218 [Thread-2] DEBUG o.s.jetty.io.SelectorManager - Stopping org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=0 selected=0
17:14:08.220 [Thread-2] DEBUG o.s.jetty.io.SelectorManager - Queued change org.spark_project.jetty.io.SelectorManager$ManagedSelector$Stop@77abc9f8
17:14:08.220 [SparkUI-36-selector-ServerConnectorManager@1df98368/0] DEBUG o.s.jetty.io.SelectorManager - Selector loop woken up from select, 0/0 selected
17:14:08.220 [SparkUI-36-selector-ServerConnectorManager@1df98368/0] DEBUG o.s.jetty.io.SelectorManager - Running change org.spark_project.jetty.io.SelectorManager$ManagedSelector$Stop@77abc9f8
17:14:08.221 [Thread-2] DEBUG o.s.jetty.io.SelectorManager - Stopped org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=-1 selected=-1
17:14:08.221 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=-1 selected=-1
17:14:08.221 [SparkUI-36-selector-ServerConnectorManager@1df98368/0] DEBUG o.s.jetty.io.SelectorManager - Stopped Thread[SparkUI-36-selector-ServerConnectorManager@1df98368/0,5,main] on org.spark_project.jetty.io.SelectorManager$ManagedSelector@2ef8a8c3 keys=-1 selected=-1
17:14:08.221 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=0 selected=0
17:14:08.221 [Thread-2] DEBUG o.s.jetty.io.SelectorManager - Stopping org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=0 selected=0
17:14:08.221 [Thread-2] DEBUG o.s.jetty.io.SelectorManager - Queued change org.spark_project.jetty.io.SelectorManager$ManagedSelector$Stop@52ab9dca
17:14:08.221 [SparkUI-37-selector-ServerConnectorManager@1df98368/1] DEBUG o.s.jetty.io.SelectorManager - Selector loop woken up from select, 0/0 selected
17:14:08.221 [SparkUI-37-selector-ServerConnectorManager@1df98368/1] DEBUG o.s.jetty.io.SelectorManager - Running change org.spark_project.jetty.io.SelectorManager$ManagedSelector$Stop@52ab9dca
17:14:08.221 [Thread-2] DEBUG o.s.jetty.io.SelectorManager - Stopped org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=-1 selected=-1
17:14:08.221 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=-1 selected=-1
17:14:08.221 [SparkUI-37-selector-ServerConnectorManager@1df98368/1] DEBUG o.s.jetty.io.SelectorManager - Stopped Thread[SparkUI-37-selector-ServerConnectorManager@1df98368/1,5,main] on org.spark_project.jetty.io.SelectorManager$ManagedSelector@1e11bc55 keys=-1 selected=-1
17:14:08.221 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.server.ServerConnector$ServerConnectorManager@1df98368
17:14:08.221 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping HttpConnectionFactory@5910de75{HTTP/1.1}
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED HttpConnectionFactory@5910de75{HTTP/1.1}
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.util.thread.ScheduledExecutorScheduler@655a5d9c
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.util.thread.ScheduledExecutorScheduler@655a5d9c
17:14:08.222 [Thread-2] INFO o.s.jetty.server.ServerConnector - Stopped ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040}
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED ServerConnector@1b5bc39d{HTTP/1.1}{0.0.0.0:4040}
17:14:08.222 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.server.Server@514eedd8
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,SHUTDOWN}]
17:14:08.222 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,SHUTDOWN}]
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b
17:14:08.222 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,SHUTDOWN}
17:14:08.222 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,UNAVAILABLE}
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@1601e47
17:14:08.222 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@1601e47
17:14:08.222 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$4-3bffddff@1b10e45a==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:14:08.223 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$4-3bffddff@1b10e45a==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:14:08.226 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@1601e47
17:14:08.227 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,UNAVAILABLE}
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@66ea1466{/stages/stage/kill,null,UNAVAILABLE}
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a
17:14:08.227 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,SHUTDOWN}
17:14:08.227 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,UNAVAILABLE}
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@21526f6c
17:14:08.227 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@21526f6c
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$4-49f5c307@75ee116==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$4-49f5c307@75ee116==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@21526f6c
17:14:08.227 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,UNAVAILABLE}
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@4c36250e{/jobs/job/kill,null,UNAVAILABLE}
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a
17:14:08.227 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1
17:14:08.227 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@791cbf87{/api,null,SHUTDOWN}
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@791cbf87{/api,null,UNAVAILABLE}
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@a7e2d9d
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@a7e2d9d
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009@a2b97056==org.spark_project.jetty.servlet.ServletHandler$Default404Servlet,-1,false
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler$Default404Servlet-649f2009@a2b97056==org.spark_project.jetty.servlet.ServletHandler$Default404Servlet,-1,false
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.glassfish.jersey.servlet.ServletContainer-7de0c6ae@83760c5d==org.glassfish.jersey.servlet.ServletContainer,-1,false
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@a7e2d9d
17:14:08.228 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@791cbf87{/api,null,UNAVAILABLE}
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@791cbf87{/api,null,UNAVAILABLE}
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@a1217f9{/,null,SHUTDOWN}
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@a1217f9{/,null,UNAVAILABLE}
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@3bde62ff
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@3bde62ff
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$4-523424b5@a5c8f39e==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$4-523424b5@a5c8f39e==org.apache.spark.ui.JettyUtils$$anon$4,-1,true
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@3bde62ff
17:14:08.228 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@a1217f9{/,null,UNAVAILABLE}
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@a1217f9{/,null,UNAVAILABLE}
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@b672aa8{/static,null,SHUTDOWN}
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@b672aa8{/static,null,UNAVAILABLE}
17:14:08.228 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@2fab4aff
17:14:08.228 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@2fab4aff
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.DefaultServlet-a77614d@22ce533d==org.spark_project.jetty.servlet.DefaultServlet,-1,true
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.DefaultServlet-a77614d@22ce533d==org.spark_project.jetty.servlet.DefaultServlet,-1,true
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@2fab4aff
17:14:08.229 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@b672aa8{/static,null,UNAVAILABLE}
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@b672aa8{/static,null,UNAVAILABLE}
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff
17:14:08.229 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,SHUTDOWN}
17:14:08.229 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,UNAVAILABLE}
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@8a589a2
17:14:08.229 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@8a589a2
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef@af24c98b==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-c65a5ef@af24c98b==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@8a589a2
17:14:08.229 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,UNAVAILABLE}
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@192f2f27{/executors/threadDump/json,null,UNAVAILABLE}
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a
17:14:08.229 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,SHUTDOWN}
17:14:08.229 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,UNAVAILABLE}
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@4f8969b0
17:14:08.229 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@4f8969b0
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190@5386a2cf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.229 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-1bdf8190@5386a2cf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@4f8969b0
17:14:08.230 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@6fefce9e{/executors/threadDump,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,SHUTDOWN}
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@18e7143f
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@18e7143f
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-f9b7332@555d017e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-f9b7332@555d017e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@18e7143f
17:14:08.230 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@209775a9{/executors/json,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@33617539{/executors,null,SHUTDOWN}
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@33617539{/executors,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@2c177f9e
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@2c177f9e
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-5db4c359@56c11761==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-5db4c359@56c11761==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@2c177f9e
17:14:08.230 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@33617539{/executors,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@33617539{/executors,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,SHUTDOWN}
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,UNAVAILABLE}
17:14:08.230 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@72e34f77
17:14:08.230 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@72e34f77
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098@c3d05377==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-7bf9b098@c3d05377==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@72e34f77
17:14:08.231 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,UNAVAILABLE}
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@6e9319f{/environment/json,null,UNAVAILABLE}
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a
17:14:08.231 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,SHUTDOWN}
17:14:08.231 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,UNAVAILABLE}
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@2416a51
17:14:08.231 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@2416a51
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba@26b4dc66==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-6fa590ba@26b4dc66==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@2416a51
17:14:08.231 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,UNAVAILABLE}
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@293bb8a5{/environment,null,UNAVAILABLE}
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e
17:14:08.231 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,SHUTDOWN}
17:14:08.231 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,UNAVAILABLE}
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@549621f3
17:14:08.231 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@549621f3
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-54361a9@2a2ca7fb==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-54361a9@2a2ca7fb==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.231 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@549621f3
17:14:08.231 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@332a7fce{/storage/rdd/json,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,SHUTDOWN}
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@72f46e16
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@72f46e16
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc@be32e55==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-3c9168dc@be32e55==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@72f46e16
17:14:08.232 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@4985cbcb{/storage/rdd,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,SHUTDOWN}
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-5af28b27@beb2bd33==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-5af28b27@beb2bd33==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@4a3e3e8b
17:14:08.232 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@7876d598{/storage/json,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@664a9613{/storage,null,SHUTDOWN}
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@664a9613{/storage,null,UNAVAILABLE}
17:14:08.232 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@5118388b
17:14:08.232 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@5118388b
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-15a902e7@ff71f2fc==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-15a902e7@ff71f2fc==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@5118388b
17:14:08.233 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@664a9613{/storage,null,UNAVAILABLE}
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@664a9613{/storage,null,UNAVAILABLE}
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7
17:14:08.233 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,SHUTDOWN}
17:14:08.233 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,UNAVAILABLE}
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@669513d8
17:14:08.233 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@669513d8
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e@973e2b03==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-3a1d593e@973e2b03==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@669513d8
17:14:08.233 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,UNAVAILABLE}
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@2f7a7219{/stages/pool/json,null,UNAVAILABLE}
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2
17:14:08.233 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,SHUTDOWN}
17:14:08.233 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,UNAVAILABLE}
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@71c5b236
17:14:08.233 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@71c5b236
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-2cab9998@eaf7b57d==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.233 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-2cab9998@eaf7b57d==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@71c5b236
17:14:08.234 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,UNAVAILABLE}
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@403132fc{/stages/pool,null,UNAVAILABLE}
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39
17:14:08.234 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,SHUTDOWN}
17:14:08.234 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,UNAVAILABLE}
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@7f69d591
17:14:08.234 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@7f69d591
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-61078690@df88ed83==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-61078690@df88ed83==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@7f69d591
17:14:08.234 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,UNAVAILABLE}
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@7c041b41{/stages/stage/json,null,UNAVAILABLE}
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b
17:14:08.234 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,SHUTDOWN}
17:14:08.234 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,UNAVAILABLE}
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@2f162cc0
17:14:08.234 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@2f162cc0
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-5df417a7@5d7dd3a5==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-5df417a7@5d7dd3a5==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.234 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@2f162cc0
17:14:08.234 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@9d157ff{/stages/stage,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,SHUTDOWN}
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@493dfb8e
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@493dfb8e
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb@4e78aaf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-5d25e6bb@4e78aaf==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@493dfb8e
17:14:08.235 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@64bc21ac{/stages/json,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@593e824f{/stages,null,SHUTDOWN}
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@593e824f{/stages,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@72ccd81a
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@72ccd81a
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-6d8792db@770deb30==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-6d8792db@770deb30==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@72ccd81a
17:14:08.235 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@593e824f{/stages,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@593e824f{/stages,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,SHUTDOWN}
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,UNAVAILABLE}
17:14:08.235 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@61861a29
17:14:08.235 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@61861a29
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-31024624@a79d1f10==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-31024624@a79d1f10==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@61861a29
17:14:08.236 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,UNAVAILABLE}
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@439a8f59{/jobs/job/json,null,UNAVAILABLE}
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4
17:14:08.236 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,SHUTDOWN}
17:14:08.236 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,UNAVAILABLE}
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@5ab14cb9
17:14:08.236 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@5ab14cb9
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-5fb97279@c0bfff91==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-5fb97279@c0bfff91==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@5ab14cb9
17:14:08.236 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,UNAVAILABLE}
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@3f23a3a0{/jobs/job,null,UNAVAILABLE}
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148
17:14:08.236 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,SHUTDOWN}
17:14:08.236 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,UNAVAILABLE}
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@4ef27d66
17:14:08.236 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@4ef27d66
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-362a019c@b61a6609==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.236 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-362a019c@b61a6609==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@4ef27d66
17:14:08.237 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,UNAVAILABLE}
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@303e3593{/jobs/json,null,UNAVAILABLE}
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e
17:14:08.237 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,SHUTDOWN}
17:14:08.237 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,UNAVAILABLE}
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.servlet.ServletHandler@47da3952
17:14:08.237 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.servlet.ServletHandler@47da3952
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.apache.spark.ui.JettyUtils$$anon$3-4fcee388@5d14a26e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.apache.spark.ui.JettyUtils$$anon$3-4fcee388@5d14a26e==org.apache.spark.ui.JettyUtils$$anon$3,-1,true
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlet.ServletHandler@47da3952
17:14:08.237 [Thread-2] INFO o.s.j.server.handler.ContextHandler - Stopped o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,UNAVAILABLE}
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED o.s.j.s.ServletContextHandler@60bdf15d{/jobs,null,UNAVAILABLE}
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.server.handler.ContextHandlerCollection@cc6460c[org.spark_project.jetty.servlets.gzip.GzipHandler@25f9407e, org.spark_project.jetty.servlets.gzip.GzipHandler@363f6148, org.spark_project.jetty.servlets.gzip.GzipHandler@5e77f0f4, org.spark_project.jetty.servlets.gzip.GzipHandler@4940809c, org.spark_project.jetty.servlets.gzip.GzipHandler@379ab47b, org.spark_project.jetty.servlets.gzip.GzipHandler@4e4efc1b, org.spark_project.jetty.servlets.gzip.GzipHandler@44a2b17b, org.spark_project.jetty.servlets.gzip.GzipHandler@7e70bd39, org.spark_project.jetty.servlets.gzip.GzipHandler@388ffbc2, org.spark_project.jetty.servlets.gzip.GzipHandler@2187fff7, org.spark_project.jetty.servlets.gzip.GzipHandler@538613b3, org.spark_project.jetty.servlets.gzip.GzipHandler@3ec11999, org.spark_project.jetty.servlets.gzip.GzipHandler@2e77b8cf, org.spark_project.jetty.servlets.gzip.GzipHandler@6e57e95e, org.spark_project.jetty.servlets.gzip.GzipHandler@560cbf1a, org.spark_project.jetty.servlets.gzip.GzipHandler@64c2b546, org.spark_project.jetty.servlets.gzip.GzipHandler@7555b920, org.spark_project.jetty.servlets.gzip.GzipHandler@b5cc23a, org.spark_project.jetty.servlets.gzip.GzipHandler@182f1e9a, org.spark_project.jetty.servlets.gzip.GzipHandler@9cd25ff, org.spark_project.jetty.servlets.gzip.GzipHandler@db44aa2, org.spark_project.jetty.servlets.gzip.GzipHandler@4eeea57d, org.spark_project.jetty.servlets.gzip.GzipHandler@548e76f1, org.spark_project.jetty.servlets.gzip.GzipHandler@3dd69f5a, org.spark_project.jetty.servlets.gzip.GzipHandler@5003041b, o.s.j.s.ServletContextHandler@779de014{/metrics/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@c9d82f9{/SQL,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@d02f8d{/SQL/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@43b0ade{/SQL/execution,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@4fe01803{/SQL/execution/json,null,SHUTDOWN}, o.s.j.s.ServletContextHandler@6569dded{/static/sql,null,SHUTDOWN}]
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping org.spark_project.jetty.server.handler.ErrorHandler@641856
17:14:08.237 [Thread-2] DEBUG o.s.j.server.handler.AbstractHandler - stopping org.spark_project.jetty.server.handler.ErrorHandler@641856
17:14:08.237 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.server.handler.ErrorHandler@641856
17:14:08.238 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - stopping SparkUI{STARTED,8<=8<=200,i=8,q=0}
17:14:08.240 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED SparkUI{STOPPED,8<=8<=200,i=0,q=0}
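
For reference, the SparkUI{STARTED,8<=8<=200,i=8,q=0} / SparkUI{STOPPED,8<=8<=200,i=0,q=0} pair above is Jetty's QueuedThreadPool toString: name{lifecycle state, minThreads<=currentThreads<=maxThreads, i=idle threads, q=queued jobs}. A throwaway sketch against plain Eclipse Jetty 9 (the unshaded org.eclipse.jetty artifact rather than Spark's org.spark_project repackaging; exact thread counts will vary at runtime) that prints the same shape:

import org.eclipse.jetty.util.thread.QueuedThreadPool;

public class PoolToStringDemo {
    public static void main(String[] args) throws Exception {
        // max=200, min=8, mirroring the pool dimensions seen in the log
        QueuedThreadPool pool = new QueuedThreadPool(200, 8);
        pool.setName("SparkUI");
        pool.start();
        System.out.println(pool); // e.g. SparkUI{STARTED,8<=8<=200,i=8,q=0}
        pool.stop();
        System.out.println(pool); // e.g. SparkUI{STOPPED,8<=8<=200,i=0,q=0}
    }
}
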
17:14:08.240 [Thread-2] DEBUG o.s.j.u.component.AbstractLifeCycle - STOPPED org.spark_project.jetty.server.Server@514eedd8
17:14:08.241 [Thread-2] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://10.0.0.3:4040
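
Everything from the SelectorManager lines down to here is Jetty tearing down the Spark web UI component by component: each UI route (/jobs, /stages, /storage, /environment, /executors, /api, /static and the SQL tab) is a GzipHandler wrapping a ServletContextHandler, and each is stopped before the ServerConnector on port 4040 and finally the Server itself. This chatter is normal at DEBUG level, not an error. If it is unwanted, the shaded Jetty loggers can be raised above DEBUG. A minimal sketch, assuming the application logs through Logback (which the o.s.j.* abbreviated logger names and the HH:mm:ss.SSS [thread] LEVEL layout suggest) with a logback.xml on the classpath:

<configuration>
  <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
    <encoder>
      <!-- same layout as the log above -->
      <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
    </encoder>
  </appender>

  <!-- quiet the shaded Jetty lifecycle chatter; Spark's own INFO lines remain -->
  <logger name="org.spark_project.jetty" level="WARN"/>

  <root level="INFO">
    <appender-ref ref="CONSOLE"/>
  </root>
</configuration>
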
17:14:08.270 [dispatcher-event-loop-3] INFO o.a.s.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
17:14:08.305 [Thread-2] INFO o.a.spark.storage.memory.MemoryStore - MemoryStore cleared
17:14:08.306 [Thread-2] INFO o.apache.spark.storage.BlockManager - BlockManager stopped
17:14:08.315 [Thread-2] INFO o.a.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
17:14:08.323 [dispatcher-event-loop-0] INFO o.a.s.s.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
17:14:08.325 [Thread-2] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
17:14:08.326 [Thread-2] INFO o.a.spark.util.ShutdownHookManager - Shutdown hook called
17:14:08.328 [Thread-2] INFO o.a.spark.util.ShutdownHookManager - Deleting directory /tmp/spark-2fd05c82-76aa-42c3-9af6-825cdbf6cd29

Process finished with exit code 1
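
Note the apparent contradiction in this tail: SparkContext reports "Successfully stopped", the shutdown hook runs and the temp directory under /tmp is deleted, yet the process exits with code 1. The teardown above runs on a JVM shutdown hook (hence the Thread-2 prefix), and that hook fires whether the driver succeeded or died, so the actual failure (an uncaught exception or an explicit System.exit(1)) happened earlier and is not visible in this excerpt. A minimal sketch of a driver shape that reproduces exactly this pattern; SentimentDriver and runJob are hypothetical names, not taken from the original project:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class SentimentDriver {
    public static void main(String[] args) {
        // local[*] is assumed here only to make the sketch runnable
        SparkConf conf = new SparkConf().setAppName("SentimentAnalysis").setMaster("local[*]");
        JavaSparkContext sc = new JavaSparkContext(conf);
        // If this throws, the uncaught exception makes the JVM exit with code 1,
        // but Spark's ShutdownHookManager still runs on a hook thread first,
        // stopping the web UI, BlockManager and SparkContext cleanly -- exactly
        // the INFO/DEBUG sequence seen above.
        runJob(sc);
        sc.stop(); // reached only on success, giving exit code 0
    }

    private static void runJob(JavaSparkContext sc) {
        // hypothetical placeholder for the real pipeline
        throw new RuntimeException("job failed");
    }
}
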