- This document is distributed under the Apache License 2.0.
- All source code described in this document is from the Apache Tez project.
- "Apache Tez: Accelerating Hadoop Query Processing" by Bikas Saha
# Default system properties included when running spark-submit.
# This is useful for setting default environmental settings.
# Example:
# spark.master spark://master:7077
spark.eventLog.enabled true
spark.eventLog.dir file:///home/ozawa/sparkeventlogs
# spark.serializer org.apache.spark.serializer.KryoSerializer
# spark.driver.memory 5g
# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
14/11/28 06:34:24 INFO client.DAGClientImpl: VertexStatus: VertexName: 0 Progress: 100% TotalTasks: 2 Succeeded: 2 Running: 0 Failed: 0 Killed: 0
14/11/28 06:34:24 INFO client.DAGClientImpl: VertexStatus: VertexName: 1 Progress: 100% TotalTasks: 2 Succeeded: 2 Running: 0 Failed: 0 Killed: 0
14/11/28 06:34:24 INFO client.DAGClientImpl: VertexStatus: VertexName: 2 Progress: 0% TotalTasks: 6 Succeeded: 0 Running: 4 Failed: 0 Killed: 0
14/11/28 06:34:27 INFO client.DAGClientImpl: DAG: State: FAILED Progress: 40% TotalTasks: 10 Succeeded: 4 Running: 0 Failed: 1 Killed: 5 FailedTaskAttempts: 1
14/11/28 06:34:27 INFO client.DAGClientImpl: VertexStatus: VertexName: 0 Progress: 100% TotalTasks: 2 Succeeded: 2 Running: 0 Failed: 0 Killed: 0
14/11/28 06:34:27 INFO client.DAGClientImpl: VertexStatus: VertexName: 1 Progress: 100% TotalTasks: 2 Succeeded: 2 Running: 0 Failed: 0 Killed: 0
14/11/28 06:34:27 INFO client.DAGClientImpl: VertexStatus: VertexName: 2 Progress: 0% TotalTasks: 6 Succeeded: 0 Runnin[truncated]
Vertex failed, vertexName=0, vertexId=vertex_1417036912823_0059_1_02, diagnostics=[Task failed, taskId=task_1417036912823_0059_1_02_000014, diagnostics=[TaskAttempt 0 failed, info=[Error: Failure while running task:java.lang.IllegalStateException: Failed to execute processor for Vertex 2
at org.apache.spark.tez.SparkTaskProcessor.run(SparkTaskProcessor.scala:55)
at org.apache.tez.runtime.library.processor.SimpleProcessor.run(SimpleProcessor.java:53)
at org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:324)
at org.apache.tez.runtime.task.TezTaskRunner$TaskRunnerCallable$1.run(TezTaskRunner.java:176)
at org.apache.tez.runtime.task.TezTaskRunner$TaskRunnerCallable$1.run(TezTaskRunner.java:168)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
at org.apache.tez.runtime.task.TezTaskRunner$TaskRunnerCallable.call(TezTaskRu[truncated]
14/11/27 15:50:26 INFO tez.DAGBuilder: DAG execution complete
14/11/27 15:50:26 ERROR tez.DAGBuilder: DAG diagnostics: [Vertex failed, vertexName=1, vertexId=vertex_1417036912823_0031_1_00, diagnostics=[Task failed, taskId=task_1417036912823_0031_1_00_000007, diagnostics=[TaskAttempt 0 failed, info=[Error: Failure
while running task:java.lang.IllegalArgumentException
at com.google.common.base.Preconditions.checkArgument(Preconditions.java:76)
{"entity":"task_1417036912823_0003_1_02_000027","entitytype":"TEZ_TASK_ID","events":[{"ts":1417051621649,"eventtype":"TASK_FINISHED"}],"otherinfo":{"startTime":1417051619126,"endTime":1417051621649,"timeTaken":2523,"status":"KILLED","diagnostics":"Task is terminated due to:OTHER_TASK_FAILURE","counters":{}}}
{"entity":"tez_container_1417036912823_0003_01_000007","entitytype":"TEZ_CONTAINER_ID","relatedEntities":[{"entity":"appattempt_1417036912823_0003_000001","entitytype":"TEZ_APPLICATION_ATTEMPT"},{"entity":"container_1417036912823_0003_01_000007","entitytype":"containerId"}],"events":[{"ts":1417051621649,"eventtype":"CONTAINER_STOPPED"}],"otherinfo":{"exitStatus":0}}
{"entity":"tez_container_1417036912823_0003_01_000006","entitytype":"TEZ_CONTAINER_ID","relatedEntities":[{"entity":"appattempt_1417036912823_0003_000001","entitytype":"TEZ_APPLICATION_ATTEMPT"},{"entity":"container_1417036912823_0003_01_000006","entitytype":"containerId"}],"events":[{"ts":1417051621650,"eventtype":"CONTAINER_STOPPED"}],"other[truncated]
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
// scala> conf.get("spark.shuffle.manager")
// res3: String = org.apache.spark.shuffle.sort.SortShuffleManager
new scala.testing.Benchmark {
  def run() {
    sc.textFile("hdfs:///user/ozawa/wordCountInput20GB")
      .flatMap(line => line.split(" "))
      .map(word => (word, 1))
      .sortByKey(true, 37)
      .saveAsTextFile("hdfs:///user/ozawa/outs/sort/3");
  }
@Test (timeout = 20000)
public void testRetriedFinishApplicationMasterRequest()
    throws Exception {
  conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
  MemoryRMStateStore memStore = new MemoryRMStateStore();
  memStore.init(conf);
  // start RM
  rm1 = new MockRM(conf, memStore);
  rm1.start();