Created August 27, 2014 00:41
Using as default JAVA_HOME.
Note, this will be overridden by -java-home if it is set.
[info] Loading project definition from /home/jay/Development/spark/project/project
[info] Loading project definition from /home/jay/.sbt/0.13/staging/ec3aa8f39111944cc5f2/sbt-pom-reader/project
[warn] Multiple resolvers having different access mechanism configured with same name 'sbt-plugin-releases'. To avoid conflict, Remove duplicate project resolvers (`resolvers`) or rename publishing resolver (`publishTo`).
[info] Loading project definition from /home/jay/Development/spark/project
[info] Set current project to spark-parent (in build file:/home/jay/Development/spark/)
[info] TriangleCountSuite:
[info] - Count a single triangle
[info] - Count two triangles
[info] - Count two triangles with bi-directed edges
[info] - Count a single triangle with duplicate edges
[info] GraphOpsSuite:
[info] - joinVertices
[info] - collectNeighborIds
[info] - filter
[info] - collectEdgesCycleDirectionOut
[info] - collectEdgesCycleDirectionIn
[info] - collectEdgesCycleDirectionEither
[info] - collectEdgesChainDirectionOut
[info] - collectEdgesChainDirectionIn
[info] - collectEdgesChainDirectionEither
[info] LabelPropagationSuite:
[info] - Label Propagation
[info] PageRankSuite:
[info] - Star PageRank
[info] - Grid PageRank
[info] - Chain PageRank
[info] ShortestPathsSuite:
[info] - Shortest Path Computations
[info] GraphSuite:
[info] - Graph.fromEdgeTuples
[info] - Graph.fromEdges
[info] - Graph.apply
[info] - triplets
[info] - partitionBy
[info] - mapVertices
[info] - mapVertices changing type with same erased type
[info] - mapEdges
[info] - mapTriplets
[info] - reverse
[info] - reverse with join elimination
[info] - subgraph
[info] - mask
[info] - groupEdges
[info] - mapReduceTriplets
[info] - outerJoinVertices
[info] - more edge partitions than vertex partitions
[info] StronglyConnectedComponentsSuite:
[info] - Island Strongly Connected Components
[info] - Cycle Strongly Connected Components
[info] - 2 Cycle Strongly Connected Components
[info] EdgeSuite:
[info] - compare
[info] SerializerSuite:
[info] - IntAggMsgSerializer
[info] - LongAggMsgSerializer
[info] - DoubleAggMsgSerializer
[info] - variable long encoding
[info] EdgeTripletIteratorSuite:
[info] - iterator.toList
[info] PregelSuite:
[info] - 1 iteration
[info] - chain propagation
[info] VertexRDDSuite:
[info] - filter
[info] - mapValues
[info] - diff
[info] - leftJoin
[info] - innerJoin
[info] - aggregateUsingIndex
[info] VertexPartitionSuite:
[info] - isDefined, filter
[info] - map
[info] - diff
[info] - leftJoin
[info] - innerJoin
[info] - createUsingIndex
[info] - innerJoinKeepLeft
[info] - aggregateUsingIndex
[info] - reindex
[info] - serialization
[info] ConnectedComponentsSuite:
[info] - Grid Connected Components
[info] - Reverse Grid Connected Components
[info] - Chain Connected Components
[info] - Reverse Chain Connected Components
[info] - Connected Components on a Toy Connected Graph
[info] SVDPlusPlusSuite:
[info] - Test SVD++ with mean square error on training set
[info] EdgePartitionSuite:
[info] - reverse
[info] - map
[info] - filter
[info] - groupEdges
[info] - upgradeIterator
[info] - indexIterator
[info] - innerJoin
[info] - isActive, numActives, replaceActives
[info] - serialization
[info] BytecodeUtilsSuite:
[info] - closure invokes a method
[info] - closure inside a closure invokes a method
[info] - closure inside a closure inside a closure invokes a method
[info] - closure calling a function that invokes a method
[info] - closure calling a function that invokes a method which uses another closure
[info] - nested closure
[info] ScalaTest
[info] Run completed in 1 minute, 37 seconds.
[info] Total number of tests run: 83
[info] Suites: completed 17, aborted 0
[info] Tests: succeeded 83, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 83, Failed 0, Errors 0, Passed 83
[info] BagelSuite:
[info] - halting by voting
[info] - halting by message silence
[info] - large number of iterations
[info] - using non-default persistence level
[info] ScalaTest
[info] Run completed in 1 minute, 50 seconds.
[info] Total number of tests run: 4
[info] Suites: completed 1, aborted 0
[info] Tests: succeeded 4, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 4, Failed 0, Errors 0, Passed 4
[info] GeneratedEvaluationSuite:
[info] - literals
[info] - 3VL Not
[info] - 3VL AND
[info] - 3VL OR
[info] - 3VL =
[info] - IN
[info] - LIKE literal Regular Expression
[info] - LIKE Non-literal Regular Expression
[info] - RLIKE literal Regular Expression
[info] - RLIKE Non-literal Regular Expression
[info] - data type casting
[info] - timestamp
[info] - timestamp casting
[info] - null checking
[info] - case when
[info] - complex type
[info] - arithmetic
[info] - BinaryComparison
[info] - StringComparison
[info] - Substring
[info] - multithreaded eval
[info] AnalysisSuite:
[info] - analyze project
[info] - resolve relations
[info] - throw errors for unresolved attributes during analysis
[info] HiveTypeCoercionSuite:
[info] - tightest common bound for numeric and boolean types
[info] FilterPushdownSuite:
[info] - eliminate subqueries
[info] - simple push down
[info] - can't push without rewrite
[info] - filters: combines filters
[info] - joins: push to either side
[info] - joins: push to one side
[info] - joins: rewrite filter to push to either side
[info] - joins: push down left outer join #1
[info] - joins: push down right outer join #1
[info] - joins: push down left outer join #2
[info] - joins: push down right outer join #2
[info] - joins: push down left outer join #3
[info] - joins: push down right outer join #3
[info] - joins: push down left outer join #4
[info] - joins: push down right outer join #4
[info] - joins: push down left outer join #5
[info] - joins: push down right outer join #5
[info] - joins: can't push down
[info] - joins: conjunctive predicates
[info] - joins: conjunctive predicates #2
[info] - joins: conjunctive predicates #3
[info] CombiningLimitsSuite:
[info] - limits: combines two limits
[info] - limits: combines three limits
[info] DistributionSuite:
[info] - HashPartitioning is the output partitioning
[info] - RangePartitioning is the output partitioning
[info] LikeSimplificationSuite:
[info] - simplify Like into StartsWith
[info] - simplify Like into EndsWith
[info] - simplify Like into Contains
[info] - simplify Like into EqualTo
[info] TreeNodeSuite:
[info] - top node changed
[info] - one child changed
[info] - no change
[info] - collect
[info] - pre-order transform
[info] - post-order transform
[info] - transform works on nodes with Option children
[info] ConstantFoldingSuite:
[info] - eliminate subqueries
[info] - Constant folding test: expressions only have literals
[info] - Constant folding test: expressions have attribute references and literals in arithmetic operations
[info] - Constant folding test: expressions have attribute references and literals in predicates
[info] - Constant folding test: expressions have foldable functions
[info] - Constant folding test: expressions have nonfoldable functions
[info] - Constant folding test: expressions have null literals
[info] GeneratedMutableEvaluationSuite:
[info] - literals
[info] - 3VL Not
[info] - 3VL AND
[info] - 3VL OR
[info] - 3VL =
[info] - IN
[info] - LIKE literal Regular Expression
[info] - LIKE Non-literal Regular Expression
[info] - RLIKE literal Regular Expression
[info] - RLIKE Non-literal Regular Expression
[info] - data type casting
[info] - timestamp
[info] - timestamp casting
[info] - null checking
[info] - case when
[info] - complex type
[info] - arithmetic
[info] - BinaryComparison
[info] - StringComparison
[info] - Substring
[info] ExpressionEvaluationSuite:
[info] - literals
[info] - 3VL Not
[info] - 3VL AND
[info] - 3VL OR
[info] - 3VL =
[info] - IN
[info] - LIKE literal Regular Expression
[info] - LIKE Non-literal Regular Expression
[info] - RLIKE literal Regular Expression
[info] - RLIKE Non-literal Regular Expression
[info] - data type casting
[info] - timestamp
[info] - timestamp casting
[info] - null checking
[info] - case when
[info] - complex type
[info] - arithmetic
[info] - BinaryComparison
[info] - StringComparison
[info] - Substring
[info] SimplifyCaseConversionExpressionsSuite:
[info] - simplify UPPER(UPPER(str))
[info] - simplify UPPER(LOWER(str))
[info] - simplify LOWER(UPPER(str))
[info] - simplify LOWER(LOWER(str))
[info] ExpressionOptimizationSuite:
[info] - literals
[info] - 3VL Not
[info] - 3VL AND
[info] - 3VL OR
[info] - 3VL =
[info] - IN
[info] - LIKE literal Regular Expression
[info] - LIKE Non-literal Regular Expression
[info] - RLIKE literal Regular Expression
[info] - RLIKE Non-literal Regular Expression
[info] - data type casting
[info] - timestamp
[info] - timestamp casting
[info] - null checking
[info] - case when
[info] - complex type
[info] - arithmetic
[info] - BinaryComparison
[info] - StringComparison
[info] - Substring
[info] RuleExecutorSuite:
[info] - only once
[info] - to fixed point
[info] - to maxIterations
[info] PlanTest:
[info] ScalaReflectionSuite:
[info] - primitive data
[info] - nullable data
[info] - optinal data
[info] - complex data
[info] - generic data
[info] - tuple data
[info] - get data type of a value
[info] ScalaTest
[info] Run completed in 2 minutes, 59 seconds.
[info] Total number of tests run: 142
[info] Suites: completed 16, aborted 0
[info] Tests: succeeded 142, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 142, Failed 0, Errors 0, Passed 142
[info] FailureSuite:
[info] - multiple failures with map
[info] - multiple failures with updateStateByKey
[info] StreamingListenerSuite:
[info] - batch info reporting
[info] - receiver info reporting
[info] NetworkReceiverSuite:
[info] - network receiver life cycle
[info] - block generator
[info] - block generator throttling
[info] UISuite:
[info] - streaming tab in spark UI !!! IGNORED !!!
[info] RateLimitedOutputStreamSuite:
[info] - write
[info] StreamingContextSuite:
[info] - from no conf constructor
[info] - from no conf + spark home
[info] - from no conf + spark home + env
[info] - from conf with settings
[info] - from existing SparkContext
[info] - from existing SparkContext with settings
[info] - from checkpoint
[info] - start and stop state check
[info] - start multiple times
[info] - stop multiple times
[info] - stop before start and start after stop
[info] - stop only streaming context
[info] - stop gracefully
[info] - awaitTermination
[info] - awaitTermination after stop
[info] - awaitTermination with error in task
[info] - awaitTermination with error in job generation
[info] WindowOperationsSuite:
[info] - window - basic window
[info] - window - tumbling window
[info] - window - larger window
[info] - window - non-overlapping window
[info] - window - persistence level
[info] - reduceByKeyAndWindow - basic reduction
[info] - reduceByKeyAndWindow - key already in window and new value added into window
[info] - reduceByKeyAndWindow - new key added into window
[info] - reduceByKeyAndWindow - key removed from window
[info] - reduceByKeyAndWindow - larger slide time
[info] - reduceByKeyAndWindow - big test
[info] - reduceByKeyAndWindow with inverse function - basic reduction
[info] - reduceByKeyAndWindow with inverse function - key already in window and new value added into window
[info] - reduceByKeyAndWindow with inverse function - new key added into window
[info] - reduceByKeyAndWindow with inverse function - key removed from window
[info] - reduceByKeyAndWindow with inverse function - larger slide time
[info] - reduceByKeyAndWindow with inverse function - big test
[info] - reduceByKeyAndWindow with inverse and filter functions - big test
[info] - groupByKeyAndWindow
[info] - countByWindow
[info] - countByValueAndWindow
[info] BasicOperationsSuite:
[info] - map
[info] - flatMap
[info] - filter
[info] - glom
[info] - mapPartitions
[info] - repartition (more partitions)
[info] - repartition (fewer partitions)
[info] - groupByKey
[info] - reduceByKey
[info] - reduce
[info] - count
[info] - countByValue
[info] - mapValues
[info] - flatMapValues
[info] - union
[info] - StreamingContext.union
[info] - transform
[info] - transformWith
[info] - StreamingContext.transform
[info] - cogroup
[info] - join
[info] - leftOuterJoin
[info] - rightOuterJoin
[info] - updateStateByKey
[info] - updateStateByKey - object lifecycle
[info] - slice
[info] - slice - has not been initialized
[info] - rdd cleanup - map and window
[info] - rdd cleanup - updateStateByKey
[info] - rdd cleanup - input blocks and persisted RDDs
[info] CheckpointSuite:
[info] - basic rdd checkpoints + dstream graph checkpoint recovery
[info] - persistence of conf through checkpoints
[info] - recovery with map and reduceByKey operations
[info] - recovery with invertible reduceByKeyAndWindow operation
[info] - recovery with updateStateByKey operation
[info] - recovery with file input stream
[info] InputStreamsSuite:
[info] - socket input stream
[info] - file input stream
[info] - actor input stream !!! IGNORED !!!
[info] - multi-thread receiver
[info] - queue input stream - oneAtATime=true
[info] - queue input stream - oneAtATime=false
[info] Test run started
[info] Test org.apache.spark.streaming.JavaAPISuite.testMap started
[info] Test org.apache.spark.streaming.JavaAPISuite.testUpdateStateByKey started
[info] Test org.apache.spark.streaming.JavaAPISuite.testWindowWithSlideDuration started
[info] Test org.apache.spark.streaming.JavaAPISuite.testFilter started
[info] Test org.apache.spark.streaming.JavaAPISuite.testRepartitionMorePartitions started
[info] Test org.apache.spark.streaming.JavaAPISuite.testRepartitionFewerPartitions started
[info] Test org.apache.spark.streaming.JavaAPISuite.testGlom started
[info] Test org.apache.spark.streaming.JavaAPISuite.testTransformWith started
[info] Test org.apache.spark.streaming.JavaAPISuite.testVariousTransformWith started
[info] Test org.apache.spark.streaming.JavaAPISuite.testStreamingContextTransform started
[info] Test org.apache.spark.streaming.JavaAPISuite.testFlatMap started
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairFlatMap started
[info] Test org.apache.spark.streaming.JavaAPISuite.testUnion started
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairMap started
[info] Test org.apache.spark.streaming.JavaAPISuite.testInitialization started
[info] Test org.apache.spark.streaming.JavaAPISuite.testCombineByKey started
[info] Test org.apache.spark.streaming.JavaAPISuite.testCountByValue started
[info] Test org.apache.spark.streaming.JavaAPISuite.testGroupByKeyAndWindow started
[info] Test org.apache.spark.streaming.JavaAPISuite.testCountByValueAndWindow started
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairTransform started
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairToNormalRDDTransform started
[info] Test org.apache.spark.streaming.JavaAPISuite.testMapValues started
[info] Test org.apache.spark.streaming.JavaAPISuite.testFlatMapValues started
[info] Test org.apache.spark.streaming.JavaAPISuite.testCoGroup started
[info] Test org.apache.spark.streaming.JavaAPISuite.testJoin started
[info] Test org.apache.spark.streaming.JavaAPISuite.testLeftOuterJoin started
[info] Test org.apache.spark.streaming.JavaAPISuite.testCheckpointMasterRecovery started
[info] Test org.apache.spark.streaming.JavaAPISuite.testSocketTextStream started
[info] Test org.apache.spark.streaming.JavaAPISuite.testSocketString started
[info] Test org.apache.spark.streaming.JavaAPISuite.testTextFileStream started
[error] Test org.apache.spark.streaming.JavaAPISuite.testTextFileStream failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at org.apache.spark.ContextCleaner.start(ContextCleaner.scala:90)
[error]     at org.apache.spark.SparkContext$$anonfun$22.apply(SparkContext.scala:332)
[error]     at org.apache.spark.SparkContext$$anonfun$22.apply(SparkContext.scala:332)
[error]     at scala.Option.foreach(Option.scala:236)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:332)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testTextFileStream failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testRawSocketStream started
[error] Test org.apache.spark.streaming.JavaAPISuite.testRawSocketStream failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[error]     at sun.reflect.GeneratedConstructorAccessor21.newInstance(Unknown Source)
[error]     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[error]     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[error]     at scala.util.Try$.apply(Try.scala:161)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at scala.util.Success.flatMap(Try.scala:200)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
[error]     at akka.actor.ActorSystemImpl.createScheduler(ActorSystem.scala:618)
[error]     at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:541)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testRawSocketStream failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testMapPartitions started
[error] Test org.apache.spark.streaming.JavaAPISuite.testMapPartitions failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[error]     at sun.reflect.GeneratedConstructorAccessor21.newInstance(Unknown Source)
[error]     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[error]     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[error]     at scala.util.Try$.apply(Try.scala:161)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at scala.util.Success.flatMap(Try.scala:200)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
[error]     at akka.actor.ActorSystemImpl.createScheduler(ActorSystem.scala:618)
[error]     at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:541)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testMapPartitions failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testReduce started
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduce failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.tryAddWorker(ForkJoinPool.java:1672)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.signalWork(ForkJoinPool.java:1966)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.externalPush(ForkJoinPool.java:1829)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.execute(ForkJoinPool.java:2955)
[error]     at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinPool.execute(AbstractDispatcher.scala:374)
[error]     at akka.dispatch.ExecutorServiceDelegate$class.execute(ThreadPoolBuilder.scala:212)
[error]     at akka.dispatch.Dispatcher$LazyExecutorServiceDelegate.execute(Dispatcher.scala:43)
[error]     at akka.dispatch.Dispatcher.registerForExecution(Dispatcher.scala:118)
[error]     at akka.dispatch.Dispatcher.dispatch(Dispatcher.scala:59)
[error]     at akka.actor.dungeon.Dispatch$class.sendMessage(Dispatch.scala:120)
[error]     at akka.actor.ActorCell.sendMessage(ActorCell.scala:338)
[error]     at akka.actor.Cell$class.sendMessage(ActorCell.scala:259)
[error]     at akka.actor.ActorCell.sendMessage(ActorCell.scala:338)
[error]     at akka.actor.RepointableActorRef.$bang(RepointableActorRef.scala:157)
[error]     at akka.event.EventStream.publish(EventStream.scala:40)
[error]     at akka.event.EventStream.publish(EventStream.scala:26)
[error]     at akka.event.SubchannelClassification$$anonfun$publish$1.apply(EventBus.scala:168)
[error]     at akka.event.SubchannelClassification$$anonfun$publish$1.apply(EventBus.scala:168)
[error]     at scala.collection.immutable.Set$Set1.foreach(Set.scala:74)
[error]     at akka.event.SubchannelClassification$class.publish(EventBus.scala:168)
[error]     at akka.event.EventStream.publish(EventStream.scala:26)
[error]     at akka.event.BusLogging.notifyInfo(Logging.scala:1035)
[error]     at akka.event.LoggingAdapter$class.info(Logging.scala:908)
[error]     at akka.event.BusLogging.info(Logging.scala:1023)
[error]     at akka.remote.Remoting.start(Remoting.scala:163)
[error]     at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184)
[error]     at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:579)
[error]     at akka.actor.ActorSystemImpl._start(ActorSystem.scala:577)
[error]     at akka.actor.ActorSystemImpl.start(ActorSystem.scala:588)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduce failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testReduceByWindow started
[ERROR] [08/26/2014 11:23:10.316] [spark-akka.actor.default-dispatcher-3] [ActorSystem(spark)] Uncaught fatal error from thread [spark-akka.actor.default-dispatcher-3] shutting down ActorSystem [spark]
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at scala.concurrent.forkjoin.ForkJoinPool.tryAddWorker(ForkJoinPool.java:1672)
	at scala.concurrent.forkjoin.ForkJoinPool.deregisterWorker(ForkJoinPool.java:1795)
	at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:117)
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduceByWindow failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[error]     at sun.reflect.GeneratedConstructorAccessor21.newInstance(Unknown Source)
[error]     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[error]     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[error]     at scala.util.Try$.apply(Try.scala:161)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at scala.util.Success.flatMap(Try.scala:200)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
[error]     at akka.actor.ActorSystemImpl.createScheduler(ActorSystem.scala:618)
[error]     at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:541)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduceByWindow failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testQueueStream started
[error] Test org.apache.spark.streaming.JavaAPISuite.testQueueStream failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[error]     at sun.reflect.GeneratedConstructorAccessor21.newInstance(Unknown Source)
[error]     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[error]     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[error]     at scala.util.Try$.apply(Try.scala:161)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at scala.util.Success.flatMap(Try.scala:200)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
[error]     at akka.actor.ActorSystemImpl.createScheduler(ActorSystem.scala:618)
[error]     at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:541)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testQueueStream failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testTransform started
[error] Test org.apache.spark.streaming.JavaAPISuite.testTransform failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[error]     at sun.reflect.GeneratedConstructorAccessor21.newInstance(Unknown Source)
[error]     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[error]     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[error]     at scala.util.Try$.apply(Try.scala:161)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at scala.util.Success.flatMap(Try.scala:200)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
[error]     at akka.actor.ActorSystemImpl.createScheduler(ActorSystem.scala:618)
[error]     at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:541)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testTransform failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testVariousTransform started
[error] Test org.apache.spark.streaming.JavaAPISuite.testVariousTransform failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[error]     at sun.reflect.GeneratedConstructorAccessor21.newInstance(Unknown Source)
[error]     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[error]     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[error]     at scala.util.Try$.apply(Try.scala:161)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[error]     at scala.util.Success.flatMap(Try.scala:200)
[error]     at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
[error]     at akka.actor.ActorSystemImpl.createScheduler(ActorSystem.scala:618)
[error]     at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:541)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testVariousTransform failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
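Every remaining JavaAPISuite case fails the same way: setUp cannot construct the streaming context because the JVM can no longer create native threads (the Akka actor system's scheduler thread is what trips the limit), and the NullPointerException that follows from tearDown is just fallout -- setUp aborted before the context field was assigned, and tearDown dereferences it. A minimal null-guarded sketch of the harness, assuming the fixture looks roughly like the LocalJavaStreamingContext named in the traces (setUp at line 31, tearDown at line 37); the constructor arguments here are illustrative assumptions:

    import org.apache.spark.streaming.Duration;
    import org.apache.spark.streaming.api.java.JavaStreamingContext;
    import org.junit.After;
    import org.junit.Before;

    // Hypothetical reconstruction for illustration, not the actual source.
    public abstract class LocalJavaStreamingContext {
        protected transient JavaStreamingContext ssc;

        @Before
        public void setUp() {
            // Constructor arguments are assumptions; if the JVM cannot spawn
            // the actor-system threads, OutOfMemoryError propagates out of
            // this call and ssc stays null.
            ssc = new JavaStreamingContext("local[2]", "test", new Duration(1000));
        }

        @After
        public void tearDown() {
            // Guarding against a failed setUp avoids the cascading
            // NullPointerException reported after each OutOfMemoryError above.
            if (ssc != null) {
                ssc.stop();
                ssc = null;
            }
        }
    }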
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairFilter started
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairFilter failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairFilter failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairMapPartitions started
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairMapPartitions failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairMapPartitions failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairMap2 started
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairMap2 failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.tryAddWorker(ForkJoinPool.java:1672)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.signalWork(ForkJoinPool.java:1966)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.fullExternalPush(ForkJoinPool.java:1905)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.externalPush(ForkJoinPool.java:1834)
[error]     at scala.concurrent.forkjoin.ForkJoinPool.execute(ForkJoinPool.java:2955)
[error]     at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinPool.execute(AbstractDispatcher.scala:374)
[error]     at akka.dispatch.ExecutorServiceDelegate$class.execute(ThreadPoolBuilder.scala:212)
[error]     at akka.dispatch.Dispatcher$LazyExecutorServiceDelegate.execute(Dispatcher.scala:43)
[error]     at akka.dispatch.Dispatcher.registerForExecution(Dispatcher.scala:118)
[error]     at akka.dispatch.MessageDispatcher.attach(AbstractDispatcher.scala:134)
[error]     at akka.actor.dungeon.Dispatch$class.start(Dispatch.scala:84)
[error]     at akka.actor.ActorCell.start(ActorCell.scala:338)
[error]     at akka.actor.LocalActorRef.start(ActorRef.scala:321)
[error]     at akka.actor.LocalActorRefProvider.init(ActorRefProvider.scala:619)
[error]     at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:157)
[error]     at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:579)
[error]     at akka.actor.ActorSystemImpl._start(ActorSystem.scala:577)
[error]     at akka.actor.ActorSystemImpl.start(ActorSystem.scala:588)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
[error]     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
[error]     at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
[error]     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
[error]     at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
[error]     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
[error]     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
[error]     at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
[error]     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
[error]     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:555)
[error]     at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:567)
[error]     at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:91)
[error]     at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:61)
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.setUp(LocalJavaStreamingContext.java:31)
[error]     ...
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairMap2 failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
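testPairMap2 hits the same OutOfMemoryError from a different allocation site -- a ForkJoinPool worker spawn instead of the scheduler constructor -- a strong hint that the per-user OS thread limit, not any single pool, is exhausted. A quick way to see how close a JVM is to that ceiling is to poll the thread MXBean. A minimal diagnostic sketch (ThreadPressureProbe is a hypothetical helper, and the ulimit comparison assumes a Linux host); the management-bean calls are standard JDK API:

    import java.lang.management.ManagementFactory;
    import java.lang.management.ThreadMXBean;

    // Hypothetical diagnostic helper, not part of the build under test.
    public class ThreadPressureProbe {
        public static void main(String[] args) {
            ThreadMXBean threads = ManagementFactory.getThreadMXBean();
            // Live/peak thread counts for this JVM; compare against the
            // per-user limit reported by `ulimit -u` on the build host.
            System.out.println("live threads:  " + threads.getThreadCount());
            System.out.println("peak threads:  " + threads.getPeakThreadCount());
            System.out.println("total started: " + threads.getTotalStartedThreadCount());
        }
    }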
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairToPairFlatMapWithChangingTypes started
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairToPairFlatMapWithChangingTypes failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairToPairFlatMapWithChangingTypes failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairGroupByKey started
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairGroupByKey failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairGroupByKey failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testPairReduceByKey started
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairReduceByKey failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testPairReduceByKey failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testCount started
[error] Test org.apache.spark.streaming.JavaAPISuite.testCount failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testCount failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testReduceByKeyAndWindowWithInverse started
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduceByKeyAndWindowWithInverse failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduceByKeyAndWindowWithInverse failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testWindow started
[error] Test org.apache.spark.streaming.JavaAPISuite.testWindow failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testWindow failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test org.apache.spark.streaming.JavaAPISuite.testReduceByKeyAndWindow started
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduceByKeyAndWindow failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     ... (stack trace identical to testVariousTransform above)
[error] Test org.apache.spark.streaming.JavaAPISuite.testReduceByKeyAndWindow failed: java.lang.NullPointerException: null
[error]     at org.apache.spark.streaming.LocalJavaStreamingContext.tearDown(LocalJavaStreamingContext.java:37)
[error]     ...
[info] Test run finished: 36 failed, 0 ignored, 47 total, 38.262s
[info] Test run started
[info] Test org.apache.spark.streaming.JavaReceiverAPISuite.testReceiver started
[error] Test org.apache.spark.streaming.JavaReceiverAPISuite.testReceiver failed: java.lang.OutOfMemoryError: unable to create new native thread
[error]     at java.lang.Thread.start0(Native Method)
[error]     at java.lang.Thread.start(Thread.java:714)
[error]     at org.apache.spark.streaming.TestServer.start(InputStreamsSuite.scala:372)
[error]     at org.apache.spark.streaming.JavaReceiverAPISuite.testReceiver(JavaReceiverAPISuite.java:57)
[error]     ...
[info] Test run finished: 1 failed, 0 ignored, 1 total, 0.001s
[info] ScalaTest
[info] Run completed in 8 minutes, 18 seconds.
[info] Total number of tests run: 87
[info] Suites: completed 10, aborted 0
[info] Tests: succeeded 87, failed 0, canceled 0, ignored 2, pending 0
[info] All tests passed.
[error] Failed: Total 135, Failed 19, Errors 0, Passed 116, Ignored 2
[error] Failed tests:
[error]     org.apache.spark.streaming.JavaAPISuite
[error]     org.apache.spark.streaming.JavaReceiverAPISuite
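The two summaries above disagree only because they count different frameworks: the ScalaTest block ("All tests passed", 87 tests) covers only the Scala suites, while sbt's combined tally ("Total 135, Failed 19") also folds in the JUnit-based Java suites, and the failures are confined to the two streaming suites listed. Since every failure is the same "unable to create new native thread" cascade, the likely fix is environmental rather than a code bug: on a Linux build host this usually means raising the per-user process/thread ceiling (ulimit -u) for the user running sbt, or reducing the number of concurrently forked test JVMs, before re-running.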
[info] TimeStampedHashMapSuite:
[info] - HashMap - basic test
[info] - TimeStampedHashMap - basic test
[info] - TimeStampedHashMap - threading safety test
[info] - TimeStampedWeakValueHashMap - basic test
[info] - TimeStampedWeakValueHashMap - threading safety test
[info] - TimeStampedHashMap - clearing by timestamp
[info] - TimeStampedWeakValueHashMap - clearing by timestamp
[info] - TimeStampedWeakValueHashMap - clearing weak references
[info] TaskResultGetterSuite:
[info] - handling results smaller than Akka frame size
[info] - handling results larger than Akka frame size
[info] - task retried if result missing from block manager
[info] TaskSchedulerImplSuite:
[info] - FIFO Scheduler Test
[info] - Fair Scheduler Test
[info] - Nested Pool Test
[info] - Scheduler does not always schedule tasks on the same workers
[info] - Scheduler correctly accounts for multiple CPUs per task
[info] AppendOnlyMapSuite:
[info] - initialization
[info] - object keys and values
[info] - primitive keys and values
[info] - null keys
[info] - null values
[info] - changeValue
[info] - inserting in capacity-1 map
[info] - destructive sort
[info] MapOutputTrackerSuite:
[info] - compressSize
[info] - decompressSize
[info] - master start and stop
[info] - master register shuffle and fetch
[info] - master register and unregister shuffle
[info] - master register shuffle and unregister map output and fetch
[info] - remote fetch
[info] - remote fetch below akka frame size
[INFO] [08/26/2014 11:23:30.915] [test-akka.actor.default-dispatcher-4] [akka://test/deadLetters] Message [[B] from TestActor[akka://test/user/$$a] to Actor[akka://test/deadLetters] was not delivered. [1] dead letters encountered. This logging can be turned off or adjusted with configuration settings 'akka.log-dead-letters' and 'akka.log-dead-letters-during-shutdown'.
[info] - remote fetch exceeds akka frame size
[info] FileAppenderSuite:
[info] - basic file appender
[info] - rolling file appender - time-based rolling
[info] - rolling file appender - size-based rolling
[info] - rolling file appender - cleaning
[info] - file appender selection
[info] WholeTextFileRecordReaderSuite:
Local disk address is /tmp/1409066618815-0.
[info] - Correctness of WholeTextFileRecordReader.
[info] LocalDirsSuite:
[info] - Utils.getLocalDir() returns a valid directory, even if some local dirs are missing
[info] - SPARK_LOCAL_DIRS override also affects driver
[info] ContextCleanerSuite:
[info] - cleanup RDD
[info] - cleanup shuffle
[info] - cleanup broadcast
[info] - automatically cleanup RDD
[info] - automatically cleanup shuffle
[info] - automatically cleanup broadcast
[info] - automatically cleanup RDD + shuffle + broadcast
[info] - automatically cleanup RDD + shuffle + broadcast in distributed mode
[info] SortShuffleContextCleanerSuite:
[info] - cleanup shuffle
[info] - automatically cleanup shuffle
[info] - automatically cleanup RDD + shuffle + broadcast in distributed mode
[info] BroadcastSuite:
[info] - Using HttpBroadcast locally
[info] - Accessing HttpBroadcast variables from multiple threads
[info] - Accessing HttpBroadcast variables in a local cluster
[info] - Using TorrentBroadcast locally
[info] - Accessing TorrentBroadcast variables from multiple threads
[info] - Accessing TorrentBroadcast variables in a local cluster
[info] - Unpersisting HttpBroadcast on executors only in local mode
[info] - Unpersisting HttpBroadcast on executors and driver in local mode
[info] - Unpersisting HttpBroadcast on executors only in distributed mode
[info] - Unpersisting HttpBroadcast on executors and driver in distributed mode
[info] - Unpersisting TorrentBroadcast on executors only in local mode
[info] - Unpersisting TorrentBroadcast on executors and driver in local mode
[info] - Unpersisting TorrentBroadcast on executors only in distributed mode
[info] - Unpersisting TorrentBroadcast on executors and driver in distributed mode
[info] ClosureCleanerSuite:
[info] - closures inside an object
[info] - closures inside a class
[info] - closures inside a class with no default constructor
[info] - closures that don't use fields of the outer class
[info] - nested closures inside an object
[info] - nested closures inside a class
[info] - toplevel return statements in closures are identified at cleaning time
[info] - return statements from named functions nested in closures don't raise exceptions
[info] KryoSerializerDistributedSuite:
[info] - kryo objects are serialised consistently in different processes
[info] ZippedPartitionsSuite:
[info] - print sizes
[info] StorageStatusListenerSuite:
[info] - block manager added/removed
[info] - task end without updated blocks
[info] - task end with updated blocks
[info] - unpersist RDD
[info] UISuite:
[info] - basic ui visibility !!! IGNORED !!!
[info] - visibility at localhost:4040 !!! IGNORED !!!
[info] - attaching a new tab !!! IGNORED !!!
[info] - jetty selects different port under contention
[info] - jetty binds to port 0 correctly
[info] - verify appUIAddress contains the scheme
[info] - verify appUIAddress contains the port
[info] ShuffleSuite:
[info] - groupByKey without compression
[info] - shuffle non-zero block size
[info] - shuffle serializer
[info] - zero sized blocks
[info] - zero sized blocks without kryo
[info] - shuffle on mutable pairs
[info] - sorting on mutable pairs
[info] - cogroup using mutable pairs
[info] - subtract mutable pairs
[info] - sort with Java non serializable class - Kryo
[info] - sort with Java non serializable class - Java
[info] BitSetSuite:
[info] - basic set and get
[info] - 100% full bit set
[info] - nextSetBit
[info] - xor len(bitsetX) < len(bitsetY)
[info] - xor len(bitsetX) > len(bitsetY)
[info] - andNot len(bitsetX) < len(bitsetY)
[info] - andNot len(bitsetX) > len(bitsetY)
[info] OpenHashMapSuite:
[info] - size for specialized, primitive value (int)
[info] - initialization
[info] - primitive value
[info] - non-primitive value
[info] - null keys
[info] - null values
[info] - changeValue
[info] - inserting in capacity-1 map
[info] FailureSuite:
[info] - failure in a single-stage job
[info] - failure in a two-stage job
[info] - failure in a map stage
[info] - failure because task results are not serializable
[info] - failure because task closure is not serializable
[info] DistributedSuite:
[info] - task throws not serializable exception
[info] - local-cluster format
[info] - simple groupByKey
[info] - groupByKey where map output sizes exceed maxMbInFlight
[info] - accumulators
[info] - broadcast variables
[info] - repeatedly failing task
Job aborted due to stage failure: Task 0 in stage 0.0 failed 4 times, most recent failure: Lost task 0.3 in stage 0.0 (TID 4, localhost): ExecutorLostFailure (executor lost)
Driver stacktrace:
[info] - repeatedly failing task that crashes JVM
[info] - caching
[info] - caching on disk
[info] - caching in memory, replicated
[info] - caching in memory, serialized, replicated
[info] - caching on disk, replicated
[info] - caching in memory and disk, replicated
[info] - caching in memory and disk, serialized, replicated
[info] - compute without caching when no partitions fit in memory
[info] - compute when only some partitions fit in memory
[info] - passing environment variables to cluster
[info] - recover from node failures
[info] - recover from repeated node failures during shuffle-map
[info] - recover from repeated node failures during shuffle-reduce
[info] - recover from node failures with replication
[info] - unpersist RDDs
[info] ServerClientIntegrationSuite:
[info] - fetch a ByteBuffer block
[info] - fetch a FileSegment block via zero-copy send
[info] - fetch a non-existent block
[info] - fetch both ByteBuffer block and FileSegment block
[info] - fetch both ByteBuffer block and a non-existent block
[info] AkkaUtilsSuite:
[info] - remote fetch security bad password
[info] - remote fetch security off
[info] - remote fetch security pass
[info] - remote fetch security off client
[info] ExternalSorterSuite:
[info] - empty data stream
[info] - few elements per partition
[info] - empty partitions with spilling
[info] - empty partitions with spilling, bypass merge-sort
[info] - spilling in local cluster
[info] - spilling in local cluster with many reduce tasks
[info] - cleanup of intermediate files in sorter
[info] - cleanup of intermediate files in sorter, bypass merge-sort
[info] - cleanup of intermediate files in sorter if there are errors
[info] - cleanup of intermediate files in sorter if there are errors, bypass merge-sort
[info] - cleanup of intermediate files in shuffle
[info] - cleanup of intermediate files in shuffle with errors
[info] - no partial aggregation or sorting
[info] - partial aggregation without spill
[info] - partial aggregation with spill, no ordering
[info] - partial aggregation with spill, with ordering
[info] - sorting without aggregation, no spill
[info] - sorting without aggregation, with spill
[info] - spilling with hash collisions
[info] - spilling with many hash collisions
[info] - spilling with hash collisions using the Int.MaxValue key
[info] - spilling with null keys and values
[info] - conditions for bypassing merge-sort
[info] BlockServerHandlerSuite:
[info] - ByteBuffer block
[info] - FileSegment block via zero-copy
[info] - pipeline exception propagation
[info] BlockManagerSuite:
[info] - StorageLevel object caching
[info] - BlockManagerId object caching
[info] - master + 1 manager interaction
[info] - master + 2 managers interaction
[info] - removing block
[info] - removing rdd
[info] - removing broadcast
[info] - reregistration on heart beat
[info] - reregistration on block update
[info] - reregistration doesn't dead lock
Some(org.apache.spark.storage.BlockResult@40020e3)
[info] - correct BlockResult returned from get() calls
[info] - in-memory LRU storage
[info] - in-memory LRU storage with serialization
[info] - in-memory LRU for partitions of same RDD
[info] - in-memory LRU for partitions of multiple RDDs
[info] - tachyon storage
[info]   + tachyon storage test disabled.
[info] - on-disk storage
[info] - disk and memory storage
[info] - disk and memory storage with getLocalBytes
[info] - disk and memory storage with serialization
[info] - disk and memory storage with serialization and getLocalBytes
[info] - LRU with mixed storage levels
[info] - in-memory LRU with streams
[info] - LRU with mixed storage levels and streams
[info] - negative byte values in ByteBufferInputStream
[info] - overly large block
[info] - block compression
[info] - block store put failure
[info] - reads of memory-mapped and non memory-mapped files are equivalent
[info] - updated block statuses
[info] - query block statuses
[info] - get matching blocks
[info] - SPARK-1194 regression: fix the same-RDD rule for cache replacement
[info] - return error message when error occurred in BlockManagerWorker#onBlockMessageReceive
[info] - return ack message when no error occurred in BlocManagerWorker#onBlockMessageReceive
[info] - reserve/release unroll memory
[info] - safely unroll blocks
[info] - safely unroll blocks through putIterator
[info] - safely unroll blocks through putIterator (disk)
[info] - multiple unrolls by the same thread
[info] DiskBlockManagerSuite:
Created root dirs: /tmp/1409067066245-0,/tmp/1409067066245-1
[info] - basic block creation
[info] - enumerating blocks
[info] - block appending
[info] - block remapping
[info] - consolidated shuffle can write to shuffle group without messing existing offsets/lengths
[info] JdbcRDDSuite:
[info] - basic functionality
[info] CheckpointSuite:
[info] - basic checkpointing
[info] - RDDs with one-to-one dependencies
[info] - ParallelCollection
[info] - BlockRDD
[info] - ShuffledRDD
[info] - UnionRDD
[info] - CartesianRDD
[0m[[0minfo[0m] [0m[32m- CoalescedRDD[0m[0m | |
[0m[[0minfo[0m] [0m[32m- CoGroupedRDD[0m[0m | |
[0m[[0minfo[0m] [0m[32m- ZippedPartitionsRDD[0m[0m | |
[0m[[0minfo[0m] [0m[32m- PartitionerAwareUnionRDD[0m[0m | |
[0m[[0minfo[0m] [0m[32m- CheckpointRDD with zero partitions[0m[0m | |
[0m[[0minfo[0m] [0m[32mFileSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- text files[0m[0m | |
[0m[[0minfo[0m] [0m[32m- text files (compressed)[0m[0m | |
[0m[[0minfo[0m] [0m[32m- SequenceFiles[0m[0m | |
[0m[[0minfo[0m] [0m[32m- SequenceFile (compressed)[0m[0m | |
[0m[[0minfo[0m] [0m[32m- SequenceFile with writable key[0m[0m | |
[0m[[0minfo[0m] [0m[32m- SequenceFile with writable value[0m[0m | |
[0m[[0minfo[0m] [0m[32m- SequenceFile with writable key and value[0m[0m | |
[0m[[0minfo[0m] [0m[32m- implicit conversions in reading SequenceFiles[0m[0m | |
[0m[[0minfo[0m] [0m[32m- object files of ints[0m[0m | |
[0m[[0minfo[0m] [0m[32m- object files of complex types[0m[0m | |
[0m[[0minfo[0m] [0m[32m- object files of classes from a JAR[0m[0m | |
[0m[[0minfo[0m] [0m[32m- write SequenceFile using new Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32m- read SequenceFile using new Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32m- file caching[0m[0m | |
[0m[[0minfo[0m] [0m[32m- prevent user from overwriting the empty directory (old Hadoop API)[0m[0m | |
[0m[[0minfo[0m] [0m[32m- prevent user from overwriting the non-empty directory (old Hadoop API)[0m[0m | |
[0m[[0minfo[0m] [0m[32m- allow user to disable the output directory existence checking (old Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32m- prevent user from overwriting the empty directory (new Hadoop API)[0m[0m | |
[0m[[0minfo[0m] [0m[32m- prevent user from overwriting the non-empty directory (new Hadoop API)[0m[0m | |
[0m[[0minfo[0m] [0m[32m- allow user to disable the output directory existence checking (new Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32m- save Hadoop Dataset through old Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32m- save Hadoop Dataset through new Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Get input files via old Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Get input files via new Hadoop API[0m[0m | |
[0m[[0minfo[0m] [0m[32mPartitioningSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- HashPartitioner equality[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangePartitioner equality[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangePartitioner getPartition[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangePartitioner for keys that are not Comparable (but with Ordering)[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangPartitioner.sketch[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangePartitioner.determineBounds[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangePartitioner should run only one job if data is roughly balanced[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangePartitioner should work well on unbalanced data[0m[0m | |
[0m[[0minfo[0m] [0m[32m- RangePartitioner should return a single partition for empty RDDs[0m[0m | |
[0m[[0minfo[0m] [0m[32m- HashPartitioner not equal to RangePartitioner[0m[0m | |
[0m[[0minfo[0m] [0m[32m- partitioner preservation[0m[0m | |
[0m[[0minfo[0m] [0m[32m- partitioning Java arrays should fail[0m[0m | |
[0m[[0minfo[0m] [0m[32m- zero-length partitions should be correctly handled[0m[0m | |
[0m[[0minfo[0m] [0m[32mBlockIdSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test-bad-deserialization[0m[0m | |
[0m[[0minfo[0m] [0m[32m- rdd[0m[0m | |
[0m[[0minfo[0m] [0m[32m- shuffle[0m[0m | |
[0m[[0minfo[0m] [0m[32m- broadcast[0m[0m | |
[0m[[0minfo[0m] [0m[32m- taskresult[0m[0m | |
[0m[[0minfo[0m] [0m[32m- stream[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test[0m[0m | |
[0m[[0minfo[0m] [0m[32mStorageSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status add non-RDD blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status update non-RDD blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status remove non-RDD blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status add RDD blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status update RDD blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status remove RDD blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status containsBlock[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status getBlock[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status num[Rdd]Blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- storage status memUsed, diskUsed, tachyonUsed[0m[0m | |
[0m[[0minfo[0m] [0m[32m- StorageUtils.updateRddInfo[0m[0m | |
[0m[[0minfo[0m] [0m[32m- StorageUtils.getRddBlockLocations[0m[0m | |
[0m[[0minfo[0m] [0m[32m- StorageUtils.getRddBlockLocations with multiple locations[0m[0m | |
[0m[[0minfo[0m] [0m[32mPairRDDFunctionsSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- aggregateByKey[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupByKey[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupByKey with duplicates[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupByKey with negative key hash codes[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupByKey with many output partitions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- sampleByKey[0m[0m | |
[0m[[0minfo[0m] [0m[32m- sampleByKeyExact[0m[0m | |
[0m[[0minfo[0m] [0m[32m- reduceByKey[0m[0m | |
[0m[[0minfo[0m] [0m[32m- reduceByKey with collectAsMap[0m[0m | |
[0m[[0minfo[0m] [0m[32m- reduceByKey with many output partitons[0m[0m | |
[0m[[0minfo[0m] [0m[32m- reduceByKey with partitioner[0m[0m | |
[0m[[0minfo[0m] [0m[32m- countApproxDistinctByKey[0m[0m | |
[0m[[0minfo[0m] [0m[32m- join[0m[0m | |
[0m[[0minfo[0m] [0m[32m- join all-to-all[0m[0m | |
[0m[[0minfo[0m] [0m[32m- leftOuterJoin[0m[0m | |
[0m[[0minfo[0m] [0m[32m- rightOuterJoin[0m[0m | |
[0m[[0minfo[0m] [0m[32m- join with no matches[0m[0m | |
[0m[[0minfo[0m] [0m[32m- join with many output partitions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupWith[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupWith3[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupWith4[0m[0m | |
[0m[[0minfo[0m] [0m[32m- zero-partition RDD[0m[0m | |
[0m[[0minfo[0m] [0m[32m- keys and values[0m[0m | |
[0m[[0minfo[0m] [0m[32m- default partitioner uses partition size[0m[0m | |
[0m[[0minfo[0m] [0m[32m- default partitioner uses largest partitioner[0m[0m | |
[0m[[0minfo[0m] [0m[32m- subtract[0m[0m | |
[0m[[0minfo[0m] [0m[32m- subtract with narrow dependency[0m[0m | |
[0m[[0minfo[0m] [0m[32m- subtractByKey[0m[0m | |
[0m[[0minfo[0m] [0m[32m- subtractByKey with narrow dependency[0m[0m | |
[0m[[0minfo[0m] [0m[32m- foldByKey[0m[0m | |
[0m[[0minfo[0m] [0m[32m- foldByKey with mutable result type[0m[0m | |
[0m[[0minfo[0m] [0m[32m- saveNewAPIHadoopFile should call setConf if format is configurable[0m[0m | |
[0m[[0minfo[0m] [0m[32m- lookup[0m[0m | |
[0m[[0minfo[0m] [0m[32m- lookup with partitioner[0m[0m | |
[0m[[0minfo[0m] [0m[32m- lookup with bad partitioner[0m[0m | |
[0m[[0minfo[0m] [0m[32mUnpersistSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- unpersist RDD[0m[0m | |
[0m[[0minfo[0m] [0m[32mProactiveClosureSerializationSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- throws expected serialization exceptions on actions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- mapPartitions transformations throw proactive serialization exceptions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- map transformations throw proactive serialization exceptions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- mapPartitionsWithContext transformations throw proactive serialization exceptions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- filter transformations throw proactive serialization exceptions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- flatMap transformations throw proactive serialization exceptions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- mapPartitionsWithIndex transformations throw proactive serialization exceptions[0m[0m | |
[0m[[0minfo[0m] [0m[32mTaskContextSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Calls executeOnCompleteCallbacks after failure[0m[0m | |
[0m[[0minfo[0m] [0m[32mSorterSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- equivalent to Arrays.sort[0m[0m | |
[0m[[0minfo[0m] [0m[32m- KVArraySorter[0m[0m | |
[0m[[0minfo[0m] [0m[33m- Sorter benchmark !!! IGNORED !!![0m[0m | |
[0m[[0minfo[0m] [0m[32mEventLoggingListenerSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Parse names of special files[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Verify special files exist[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Verify special files exist with compression[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Parse event logging info[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Parse event logging info with compression[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Basic event logging[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Basic event logging with compression[0m[0m | |
[0m[[0minfo[0m] [0m[32m- End-to-end event logging[0m[0m | |
[0m[[0minfo[0m] [0m[32m- End-to-end event logging with compression[0m[0m | |
[0m[[0minfo[0m] [0m[32mKryoSerializerResizableOutputSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo without resizable output buffer should fail on large array[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with resizable output buffer should succeed on large array[0m[0m | |
[0m[[0minfo[0m] [0m[32mJsonProtocolSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- SparkListenerEvent[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Dependent Classes[0m[0m | |
[0m[[0minfo[0m] [0m[32m- StageInfo backward compatibility[0m[0m | |
[0m[[0minfo[0m] [0m[32m- InputMetrics backward compatibility[0m[0m | |
[0m[[0minfo[0m] [0m[32mSizeEstimatorSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- simple classes[0m[0m | |
[0m[[0minfo[0m] [0m[32m- strings[0m[0m | |
[0m[[0minfo[0m] [0m[32m- primitive arrays[0m[0m | |
[0m[[0minfo[0m] [0m[32m- object arrays[0m[0m | |
[0m[[0minfo[0m] [0m[32m- 32-bit arch[0m[0m | |
[0m[[0minfo[0m] [0m[32m- 64-bit arch with no compressed oops[0m[0m | |
[0m[[0minfo[0m] [0m[32mJobProgressListenerSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test LRU eviction of stages[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test executor id to summary[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test task success vs failure counting for different task end reasons[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test update metrics[0m[0m | |
[0m[[0minfo[0m] [0m[32mPipedRDDSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- basic pipe[0m[0m | |
[0m[[0minfo[0m] [0m[32m- advanced pipe[0m[0m | |
[0m[[0minfo[0m] [0m[32m- pipe with env variable[0m[0m | |
[0m[[0minfo[0m] [0m[32m- pipe with non-zero exit status[0m[0m | |
[0m[[0minfo[0m] [0m[32m- basic pipe with separate working directory[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test pipe exports map_input_file[0m[0m | |
[0m[[0minfo[0m] [0m[32m- test pipe exports mapreduce_map_input_file[0m[0m | |
[0m[[0minfo[0m] [0m[32mShuffleNettySuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- groupByKey without compression[0m[0m | |
[0m[[0minfo[0m] [0m[32m- shuffle non-zero block size[0m[0m | |
[0m[[0minfo[0m] [0m[32m- shuffle serializer[0m[0m | |
[0m[[0minfo[0m] [0m[32m- zero sized blocks[0m[0m | |
[0m[[0minfo[0m] [0m[32m- zero sized blocks without kryo[0m[0m | |
[0m[[0minfo[0m] [0m[32m- shuffle on mutable pairs[0m[0m | |
[0m[[0minfo[0m] [0m[32m- sorting on mutable pairs[0m[0m | |
[0m[[0minfo[0m] [0m[32m- cogroup using mutable pairs[0m[0m | |
[0m[[0minfo[0m] [0m[32m- subtract mutable pairs[0m[0m | |
[0m[[0minfo[0m] [0m[32m- sort with Java non serializable class - Kryo[0m[0m | |
[0m[[0minfo[0m] [0m[32m- sort with Java non serializable class - Java[0m[0m | |
[0m[[0minfo[0m] [0m[32mSparkContextInfoSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- getPersistentRDDs only returns RDDs that are marked as cached[0m[0m | |
[0m[[0minfo[0m] [0m[32m- getPersistentRDDs returns an immutable map[0m[0m | |
[0m[[0minfo[0m] [0m[32m- getRDDStorageInfo only reports on RDDs that actually persist data[0m[0m | |
[0m[[0minfo[0m] [0m[32m- call sites report correct locations[0m[0m | |
[0m[[0minfo[0m] [0m[32mKryoSerializerSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- basic types[0m[0m | |
[0m[[0minfo[0m] [0m[32m- pairs[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Scala data structures[0m[0m | |
[0m[[0minfo[0m] [0m[32m- ranges[0m[0m | |
[0m[[0minfo[0m] [0m[32m- asJavaIterable[0m[0m | |
[0m[[0minfo[0m] [0m[32m- custom registrator[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with collect[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with parallelize[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with parallelize for specialized tuples[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with parallelize for primitive arrays[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with collect for specialized tuples[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with SerializableHyperLogLog[0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with reduce[0m[0m | |
[0m[[0minfo[0m] [0m[33m- kryo with fold !!! IGNORED !!![0m[0m | |
[0m[[0minfo[0m] [0m[32m- kryo with nonexistent custom registrator should fail[0m[0m | |
[0m[[0minfo[0m] [0m[32m- default class loader can be set by a different thread[0m[0m | |
[0m[[0minfo[0m] [0m[32mMetricsConfigSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- MetricsConfig with default properties[0m[0m | |
[0m[[0minfo[0m] [0m[32m- MetricsConfig with properties set[0m[0m | |
[0m[[0minfo[0m] [0m[32m- MetricsConfig with subProperties[0m[0m | |
[0m[[0minfo[0m] [0m[32mSparkListenerSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- basic creation and shutdown of LiveListenerBus[0m[0m | |
[0m[[0minfo[0m] [0m[32m- bus.stop() waits for the event queue to completely drain[0m[0m | |
[0m[[0minfo[0m] [0m[32m- basic creation of StageInfo[0m[0m | |
[0m[[0minfo[0m] [0m[32m- basic creation of StageInfo with shuffle[0m[0m | |
[0m[[0minfo[0m] [0m[32m- StageInfo with fewer tasks than partitions[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local metrics[0m[0m | |
[0m[[0minfo[0m] [0m[32m- onTaskGettingResult() called when result fetched remotely[0m[0m | |
[0m[[0minfo[0m] [0m[32m- onTaskGettingResult() not called when result sent directly[0m[0m | |
[0m[[0minfo[0m] [0m[32m- onTaskEnd() should be called for all started tasks, even after job has been killed[0m[0m | |
[0m[[0minfo[0m] [0m[32m- SparkListener moves on if a listener throws an exception[0m[0m | |
[0m[[0minfo[0m] [0m[32mBlockFetcherIteratorSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- block fetch from local fails using BasicBlockFetcherIterator[0m[0m | |
[0m[[0minfo[0m] [0m[32m- block fetch from local succeed using BasicBlockFetcherIterator[0m[0m | |
[0m[[0minfo[0m] [0m[32m- block fetch from remote fails using BasicBlockFetcherIterator[0m[0m | |
[0m[[0minfo[0m] [0m[32m- block fetch from remote succeed using BasicBlockFetcherIterator[0m[0m | |
[0m[[0minfo[0m] [0m[32mSamplingUtilsSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- reservoirSampleAndCount[0m[0m | |
[0m[[0minfo[0m] [0m[32m- computeFraction[0m[0m | |
[0m[[0minfo[0m] [0m[32mBlockHeaderEncoderSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- encode normal block data[0m[0m | |
[0m[[0minfo[0m] [0m[32m- encode error message[0m[0m | |
[0m[[0minfo[0m] [0m[32mStorageTabSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- stage submitted / completed[0m[0m | |
[0m[[0minfo[0m] [0m[32m- unpersist[0m[0m | |
[0m[[0minfo[0m] [0m[32m- task end[0m[0m | |
[0m[[0minfo[0m] [0m[32mRandomSamplerSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- BernoulliSamplerWithRange[0m[0m | |
[0m[[0minfo[0m] [0m[32m- BernoulliSamplerWithRangeInverse[0m[0m | |
[0m[[0minfo[0m] [0m[32m- BernoulliSamplerWithRatio[0m[0m | |
[0m[[0minfo[0m] [0m[32m- BernoulliSamplerWithComplement[0m[0m | |
[0m[[0minfo[0m] [0m[32m- BernoulliSamplerSetSeed[0m[0m | |
[0m[[0minfo[0m] [0m[32m- PoissonSampler[0m[0m | |
[0m[[0minfo[0m] [0m[32mPythonRDDSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- Writing large strings to the worker[0m[0m | |
[0m[[0minfo[0m] [0m[32mVectorSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- random with default random number generator[0m[0m | |
[0m[[0minfo[0m] [0m[32m- random with given random number generator[0m[0m | |
[0m[[0minfo[0m] [0m[32mShuffleMemoryManagerSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- single thread requesting memory[0m[0m | |
[0m[[0minfo[0m] [0m[32m- two threads requesting full memory[0m[0m | |
[0m[[0minfo[0m] [0m[32m- threads cannot grow past 1 / N[0m[0m | |
[0m[[0minfo[0m] [0m[32m- threads can block to get at least 1 / 2N memory[0m[0m | |
[0m[[0minfo[0m] [0m[32m- releaseMemoryForThisThread[0m[0m | |
[0m[[0minfo[0m] [0m[32mCompressionCodecSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- default compression codec[0m[0m | |
[0m[[0minfo[0m] [0m[32m- lz4 compression codec[0m[0m | |
[0m[[0minfo[0m] [0m[32m- lz4 compression codec short form[0m[0m | |
[0m[[0minfo[0m] [0m[32m- lzf compression codec[0m[0m | |
[0m[[0minfo[0m] [0m[32m- lzf compression codec short form[0m[0m | |
[0m[[0minfo[0m] [0m[32m- snappy compression codec[0m[0m | |
[0m[[0minfo[0m] [0m[32m- snappy compression codec short form[0m[0m | |
[0m[[0minfo[0m] [0m[32mClientSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- correctly validates driver jar URL's[0m[0m | |
[0m[[0minfo[0m] [0m[32mSparkContextSchedulerCreationSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[32m- bad-master[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local-*[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local-n[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local-*-n-failures[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local-n-failures[0m[0m | |
[0m[[0minfo[0m] [0m[32m- bad-local-n[0m[0m | |
[0m[[0minfo[0m] [0m[32m- bad-local-n-failures[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local-default-parallelism[0m[0m | |
[0m[[0minfo[0m] [0m[32m- simr[0m[0m | |
[0m[[0minfo[0m] [0m[32m- local-cluster[0m[0m | |
[0m[[0minfo[0m] [0m[31m- yarn-cluster *** FAILED ***[0m[0m | |
[0m[[0minfo[0m] [0m[31m unable to create new native thread (SparkContextSchedulerCreationSuite.scala:134)[0m[0m | |
[0m[[0minfo[0m] [0m[31m- yarn-standalone *** FAILED ***[0m[0m | |
[0m[[0minfo[0m] [0m[31m unable to create new native thread (SparkContextSchedulerCreationSuite.scala:134)[0m[0m | |
[0m[[0minfo[0m] [0m[31m- yarn-client *** FAILED ***[0m[0m | |
[0m[[0minfo[0m] [0m[31m unable to create new native thread (SparkContextSchedulerCreationSuite.scala:134)[0m[0m | |
[0m[[0minfo[0m] [0m[31m- mesos fine-grained *** FAILED ***[0m[0m | |
[0m[[0minfo[0m] [0m[31m unable to create new native thread (SparkContextSchedulerCreationSuite.scala:158)[0m[0m | |
[0m[[0minfo[0m] [0m[31m- mesos coarse-grained *** FAILED ***[0m[0m | |
[0m[[0minfo[0m] [0m[31m unable to create new native thread (SparkContextSchedulerCreationSuite.scala:158)[0m[0m | |
[0m[[0minfo[0m] [0m[31m- mesos with zookeeper *** FAILED ***[0m[0m | |
[0m[[0minfo[0m] [0m[31m unable to create new native thread (SparkContextSchedulerCreationSuite.scala:158)[0m[0m | |
[0m[[0minfo[0m] [0m[32mAsyncRDDActionsSuite:[0m[0m | |
[0m[[0minfo[0m] [0m[31mException encountered when attempting to run a suite with class name: org.apache.spark.rdd.AsyncRDDActionsSuite *** ABORTED ***[0m[0m | |
[0m[[0minfo[0m] [0m[31m java.lang.OutOfMemoryError: unable to create new native thread[0m[0m | |
[0m[[0minfo[0m] [0m[31m at java.lang.Thread.start0(Native Method)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at java.lang.Thread.start(Thread.java:714)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at java.lang.reflect.Constructor.newInstance(Constructor.java:526)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at scala.util.Try$.apply(Try.scala:161)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)[0m[0m | |
[0m[[0minfo[0m] [0m[31m at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)[0m[0m | |
[0m[[0minfo[0m] [0m[31m ...[0m[0m | |
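NOTE: from this point on, every abort is the same java.lang.OutOfMemoryError: unable to create new native thread. Despite the name, this is usually not heap exhaustion: the JVM throws it when the OS refuses to create another native thread, most often because the per-user process/thread limit (ulimit -u / nproc on Linux) was reached while sbt was running many forked test JVMs. A minimal Scala sketch to probe how much thread headroom the build machine has left, assuming that cap is the culprit (the object name is made up for illustration and is not part of Spark's test code):

// Hypothetical diagnostic: keep starting cheap daemon threads until
// Thread.start() fails, reproducing the OOM seen above once the OS
// per-user thread cap is exhausted.
object ThreadHeadroom {
  def main(args: Array[String]): Unit = {
    var started = 0
    try {
      while (true) {
        val t = new Thread(new Runnable {
          def run(): Unit =
            try Thread.sleep(Long.MaxValue)       // park the thread cheaply
            catch { case _: InterruptedException => () }
        })
        t.setDaemon(true)                          // let the JVM exit afterwards
        t.start()
        started += 1
      }
    } catch {
      case _: OutOfMemoryError =>
        println(s"Thread.start() failed after $started extra threads")
    }
  }
}

If the count comes out low, raising the limit in the shell that launches sbt (for example via ulimit -u) should clear the cascade of ABORTED suites below.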
[info] PrimitiveKeyOpenHashMapSuite:
[info] - size for specialized, primitive key, value (int, int)
[info] - initialization
[info] - basic operations
[info] - null values
[info] - changeValue
[info] - inserting in capacity-1 map
[info] RDDSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.rdd.RDDSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] CacheManagerSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.CacheManagerSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] WorkerWatcherSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.deploy.worker.WorkerWatcherSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] CoarseGrainedSchedulerBackendSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.scheduler.CoarseGrainedSchedulerBackendSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] SortShuffleSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.SortShuffleSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] JsonProtocolSuite:
[info] - writeApplicationInfo
[info] - writeWorkerInfo
[info] - writeApplicationDescription
[info] - writeExecutorRunner
[info] - writeDriverInfo
[info] - writeMasterState
[info] - writeWorkerState
[info] ReplayListenerSuite:
[info] - Simple replay
[info] - Simple replay with compression
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.scheduler.ReplayListenerSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at scala.concurrent.forkjoin.ForkJoinPool.tryAddWorker(ForkJoinPool.java:1672)
[info]   at scala.concurrent.forkjoin.ForkJoinPool.signalWork(ForkJoinPool.java:1966)
[info]   at scala.concurrent.forkjoin.ForkJoinPool.fullExternalPush(ForkJoinPool.java:1905)
[info]   at scala.concurrent.forkjoin.ForkJoinPool.externalPush(ForkJoinPool.java:1834)
[info]   at scala.concurrent.forkjoin.ForkJoinPool.execute(ForkJoinPool.java:2955)
[info]   at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinPool.execute(AbstractDispatcher.scala:374)
[info]   at akka.dispatch.ExecutorServiceDelegate$class.execute(ThreadPoolBuilder.scala:212)
[info]   at akka.dispatch.Dispatcher$LazyExecutorServiceDelegate.execute(Dispatcher.scala:43)
[info]   ...
[info] DistributionSuite:
[info] - summary
[info] ParallelCollectionSplitSuite:
[info] - one element per slice
[info] - one slice
[info] - equal slices
[info] - non-equal slices
[info] - splitting exclusive range
[info] - splitting inclusive range
[info] - empty data
[info] - zero slices
[info] - negative number of slices
[info] - exclusive ranges sliced into ranges
[info] - inclusive ranges sliced into ranges
[info] - identical slice sizes between Range and NumericRange
[info] - identical slice sizes between List and NumericRange
[info] - large ranges don't overflow
[info] - random array tests
[info] - random exclusive range tests
[info] - random inclusive range tests
[info] - exclusive ranges of longs
[info] - inclusive ranges of longs
[info] - exclusive ranges of doubles
[info] - inclusive ranges of doubles
[info] DriverRunnerTest:
[info] - Process succeeds instantly
[info] - Process failing several times and then succeeding
[info] - Process doesn't restart if not supervised
[info] - Process doesn't restart if killed
[info] - Reset of backoff counter
[info] ConnectionManagerSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.network.ConnectionManagerSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at java.util.Timer.<init>(Timer.java:176)
[info]   at org.apache.spark.network.ConnectionManager.<init>(ConnectionManager.scala:71)
[info]   at org.apache.spark.network.ConnectionManagerSuite$$anonfun$1.apply$mcV$sp(ConnectionManagerSuite.scala:44)
[info]   at org.apache.spark.network.ConnectionManagerSuite$$anonfun$1.apply(ConnectionManagerSuite.scala:41)
[info]   at org.apache.spark.network.ConnectionManagerSuite$$anonfun$1.apply(ConnectionManagerSuite.scala:41)
[info]   at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
[info]   at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
[info]   at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
[info]   ...
[info] AccumulatorSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.AccumulatorSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] ExecutorRunnerTest:
[info] - command includes appId *** FAILED ***
[info]   java.io.IOException: Cannot run program "/home/jay/Development/spark/bin/compute-classpath.sh" (in directory "."): error=11, Resource temporarily unavailable
[info]   at java.lang.ProcessBuilder.start(ProcessBuilder.java:1041)
[info]   at org.apache.spark.util.Utils$.executeAndGetOutput(Utils.scala:852)
[info]   at org.apache.spark.deploy.worker.CommandUtils$.buildJavaOpts(CommandUtils.scala:71)
[info]   at org.apache.spark.deploy.worker.CommandUtils$.buildCommandSeq(CommandUtils.scala:37)
[info]   at org.apache.spark.deploy.worker.ExecutorRunner.getCommandSeq(ExecutorRunner.scala:125)
[info]   at org.apache.spark.deploy.worker.ExecutorRunnerTest$$anonfun$1.apply$mcV$sp(ExecutorRunnerTest.scala:37)
[info]   at org.apache.spark.deploy.worker.ExecutorRunnerTest$$anonfun$1.apply(ExecutorRunnerTest.scala:28)
[info]   at org.apache.spark.deploy.worker.ExecutorRunnerTest$$anonfun$1.apply(ExecutorRunnerTest.scala:28)
[info]   at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
[info]   at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
[info]   ...
[info]   Cause: java.io.IOException: error=11, Resource temporarily unavailable
[info]   at java.lang.UNIXProcess.forkAndExec(Native Method)
[info]   at java.lang.UNIXProcess.<init>(UNIXProcess.java:186)
[info]   at java.lang.ProcessImpl.start(ProcessImpl.java:130)
[info]   at java.lang.ProcessBuilder.start(ProcessBuilder.java:1022)
[info]   at org.apache.spark.util.Utils$.executeAndGetOutput(Utils.scala:852)
[info]   at org.apache.spark.deploy.worker.CommandUtils$.buildJavaOpts(CommandUtils.scala:71)
[info]   at org.apache.spark.deploy.worker.CommandUtils$.buildCommandSeq(CommandUtils.scala:37)
[info]   at org.apache.spark.deploy.worker.ExecutorRunner.getCommandSeq(ExecutorRunner.scala:125)
[info]   at org.apache.spark.deploy.worker.ExecutorRunnerTest$$anonfun$1.apply$mcV$sp(ExecutorRunnerTest.scala:37)
[info]   at org.apache.spark.deploy.worker.ExecutorRunnerTest$$anonfun$1.apply(ExecutorRunnerTest.scala:28)
[info]   ...
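NOTE: the error=11 (EAGAIN) from UNIXProcess.forkAndExec above is most likely the process-side face of the same exhaustion: once the per-user task limit is hit, the kernel refuses to fork compute-classpath.sh just as it refuses new threads, so this failure is probably collateral from the thread-cap problem rather than a bug in ExecutorRunner itself.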
[info] ThreadingSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.ThreadingSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] BlockFetchingClientHandlerSuite:
[info] - handling block data (successful fetch)
[info] - handling error message (failed fetch)
[info] ExternalAppendOnlyMapSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.util.collection.ExternalAppendOnlyMapSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] JobCancellationSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.JobCancellationSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] OpenHashSetSuite:
[info] - size for specialized, primitive int
[info] - primitive int
[info] - primitive long
[info] - non-primitive
[info] - non-primitive set growth
[info] - primitive set growth
[info] XORShiftRandomSuite:
[info] - XORShift generates valid random numbers
[info] - XORShift with zero seed
[info] PrimitiveVectorSuite:
[info] - primitive value
[info] - non-primitive value
[info] - ideal growth
[info] - ideal size
[info] - resizing
[info] CompactBufferSuite:
[info] - empty buffer
[info] - basic inserts
[info] - adding sequences
[info] - adding the same buffer to itself
[info] FlatmapIteratorSuite:
[ERROR] [08/26/2014 11:33:59.983] [pool-1-thread-1-ScalaTest-running-FlatmapIteratorSuite] [Remoting] Remoting error: [Startup timed out] [
akka.remote.RemoteTransportException: Startup timed out
    at akka.remote.Remoting.akka$remote$Remoting$$notifyError(Remoting.scala:129)
    at akka.remote.Remoting.start(Remoting.scala:191)
    at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184)
    at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:579)
    at akka.actor.ActorSystemImpl._start(ActorSystem.scala:577)
    at akka.actor.ActorSystemImpl.start(ActorSystem.scala:588)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
    at org.apache.spark.storage.FlatmapIteratorSuite$$anonfun$1.apply$mcV$sp(FlatmapIteratorSuite.scala:37)
    at org.apache.spark.storage.FlatmapIteratorSuite$$anonfun$1.apply(FlatmapIteratorSuite.scala:35)
    at org.apache.spark.storage.FlatmapIteratorSuite$$anonfun$1.apply(FlatmapIteratorSuite.scala:35)
    at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
    at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
    at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
    at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    at org.scalatest.Transformer.apply(Transformer.scala:22)
    at org.scalatest.Transformer.apply(Transformer.scala:20)
    at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:158)
    at org.scalatest.Suite$class.withFixture(Suite.scala:1121)
    at org.scalatest.FunSuite.withFixture(FunSuite.scala:1559)
    at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:155)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:167)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:167)
    at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
    at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:167)
    at org.apache.spark.storage.FlatmapIteratorSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(FlatmapIteratorSuite.scala:23)
    at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
    at org.apache.spark.storage.FlatmapIteratorSuite.runTest(FlatmapIteratorSuite.scala:23)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:200)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:200)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
    at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:200)
    at org.scalatest.FunSuite.runTests(FunSuite.scala:1559)
    at org.scalatest.Suite$class.run(Suite.scala:1423)
    at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1559)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:204)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:204)
    at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
    at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:204)
    at org.apache.spark.storage.FlatmapIteratorSuite.org$scalatest$BeforeAndAfterAll$$super$run(FlatmapIteratorSuite.scala:23)
    at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
    at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
    at org.apache.spark.storage.FlatmapIteratorSuite.run(FlatmapIteratorSuite.scala:23)
    at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:444)
    at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:651)
    at sbt.ForkMain$Run$2.call(ForkMain.java:294)
    at sbt.ForkMain$Run$2.call(ForkMain.java:284)
    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
Caused by: java.util.concurrent.TimeoutException: Futures timed out after [10000 milliseconds]
    at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
    at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
    at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
    at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
    at scala.concurrent.Await$.result(package.scala:107)
    at akka.remote.Remoting.start(Remoting.scala:173)
    ... 63 more
]
[info] - Flatmap Iterator to Disk *** FAILED ***
[info]   java.util.concurrent.TimeoutException: Futures timed out after [10000 milliseconds]
[info]   at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
[info]   at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
[info]   at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
[info]   at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
[info]   at scala.concurrent.Await$.result(package.scala:107)
[info]   at akka.remote.Remoting.start(Remoting.scala:173)
[info]   at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184)
[info]   at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:579)
[info]   at akka.actor.ActorSystemImpl._start(ActorSystem.scala:577)
[info]   at akka.actor.ActorSystemImpl.start(ActorSystem.scala:588)
[info]   ...
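NOTE: the Remoting "Startup timed out" / "Futures timed out after [10000 milliseconds]" failures in FlatmapIteratorSuite look like a different bug, but they are plausibly the same starvation seen above: Remoting.start blocks on a future completed by Akka's dispatcher, and with no native threads left (compare the ForkJoinPool.tryAddWorker OOM in ReplayListenerSuite earlier) the transport never comes up, so the 10-second default startup timeout fires instead of a clean OOM.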
[ERROR] [08/26/2014 11:34:09.995] [pool-1-thread-1-ScalaTest-running-FlatmapIteratorSuite] [Remoting] Remoting error: [Startup timed out] [ | |
akka.remote.RemoteTransportException: Startup timed out | |
at akka.remote.Remoting.akka$remote$Remoting$$notifyError(Remoting.scala:129) | |
at akka.remote.Remoting.start(Remoting.scala:191) | |
at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184) | |
at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:579) | |
at akka.actor.ActorSystemImpl._start(ActorSystem.scala:577) | |
at akka.actor.ActorSystemImpl.start(ActorSystem.scala:588) | |
at akka.actor.ActorSystem$.apply(ActorSystem.scala:111) | |
at akka.actor.ActorSystem$.apply(ActorSystem.scala:104) | |
at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:150)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
at org.apache.spark.storage.FlatmapIteratorSuite$$anonfun$3.apply$mcV$sp(FlatmapIteratorSuite.scala:48)
at org.apache.spark.storage.FlatmapIteratorSuite$$anonfun$3.apply(FlatmapIteratorSuite.scala:46)
at org.apache.spark.storage.FlatmapIteratorSuite$$anonfun$3.apply(FlatmapIteratorSuite.scala:46)
at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:158)
at org.scalatest.Suite$class.withFixture(Suite.scala:1121)
at org.scalatest.FunSuite.withFixture(FunSuite.scala:1559)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:155)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:167)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:167)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:167)
at org.apache.spark.storage.FlatmapIteratorSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(FlatmapIteratorSuite.scala:23)
at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
at org.apache.spark.storage.FlatmapIteratorSuite.runTest(FlatmapIteratorSuite.scala:23)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:200)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:200)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:318)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:200)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1559)
at org.scalatest.Suite$class.run(Suite.scala:1423)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1559)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:204)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:204)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:204)
at org.apache.spark.storage.FlatmapIteratorSuite.org$scalatest$BeforeAndAfterAll$$super$run(FlatmapIteratorSuite.scala:23)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.storage.FlatmapIteratorSuite.run(FlatmapIteratorSuite.scala:23)
at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:444)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:651)
at sbt.ForkMain$Run$2.call(ForkMain.java:294)
at sbt.ForkMain$Run$2.call(ForkMain.java:284)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.util.concurrent.TimeoutException: Futures timed out after [10000 milliseconds]
at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
at scala.concurrent.Await$.result(package.scala:107)
at akka.remote.Remoting.start(Remoting.scala:173)
... 63 more
]
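
The dump above shows the ActorSystem failing to come up: akka.remote.Remoting.start blocks on a future that never completes, and the [10000 milliseconds] matches Akka remoting's default startup-timeout of 10 seconds. A minimal mitigation sketch in shell, assuming the Typesafe Config -D override actually reaches this code path (Spark's AkkaUtils assembles its own Akka config, so this passthrough is an assumption, not something this log confirms):

# Hypothetical: lengthen Akka's remoting startup timeout for the forked test JVMs.
# Typesafe Config honors -D system-property overrides when it loads config via
# ConfigFactory.load(); whether Spark's AkkaUtils falls back to that is an assumption.
SBT_OPTS="$SBT_OPTS -Dakka.remote.startup-timeout=30s" sbt test

Given the OutOfMemoryErrors later in this run, the timeout is more plausibly a symptom of thread exhaustion on the build machine than of a slow network stack.
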
[info] - Flatmap Iterator to Memory *** FAILED ***
[info]   java.util.concurrent.TimeoutException: Futures timed out after [10000 milliseconds]
[info]   at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
[info]   at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
[info]   at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
[info]   at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
[info]   at scala.concurrent.Await$.result(package.scala:107)
[info]   at akka.remote.Remoting.start(Remoting.scala:173)
[info]   at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184)
[info]   at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:579)
[info]   at akka.actor.ActorSystemImpl._start(ActorSystem.scala:577)
[info]   at akka.actor.ActorSystemImpl.start(ActorSystem.scala:588)
[info]   ...
[info] - Serializer Reset *** FAILED ***
[info]   java.util.concurrent.TimeoutException: Futures timed out after [10000 milliseconds]
[info]   at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
[info]   at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
[info]   at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
[info]   at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
[info]   at scala.concurrent.Await$.result(package.scala:107)
[info]   at akka.remote.Remoting.start(Remoting.scala:173)
[info]   at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184)
[info]   at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:579)
[info]   at akka.actor.ActorSystemImpl._start(ActorSystem.scala:577)
[info]   at akka.actor.ActorSystemImpl.start(ActorSystem.scala:588)
[info]   ...
[info] DriverSuite:
[info] - driver should exit after finishing *** FAILED ***
[info]   OutOfMemoryError was thrown during property evaluation. (DriverSuite.scala:40)
[info]   Message: unable to create new native thread
[info]   Occurred at table row 0 (zero based, not counting headings), which had values (
[info]     master = local
[info]   )
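
"unable to create new native thread" is not a heap problem: the JVM asked the operating system for a new thread and was refused, which on Linux usually means the per-user process/thread cap was hit. A minimal sketch for checking and raising that cap before rerunning the suite (the value 32768 is illustrative, not something this log prescribes):

# Show the per-user process/thread cap and the system-wide thread cap
ulimit -u
cat /proc/sys/kernel/threads-max

# Raise the soft limit for this shell, then rerun the tests from it
ulimit -u 32768
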
[info] NextIteratorSuite:
[info] - one iteration
[info] - two iterations
[info] - empty iteration
[info] - close is called once for empty iterations
[info] - close is called once for non-empty iterations
[info] UtilsSuite:
[info] - bytesToString
[info] - copyStream
[info] - memoryStringToMb
[info] - splitCommandString
[info] - string formatting of time durations
[info] - reading offset bytes of a file
[info] - reading offset bytes across multiple files
[info] - deserialize long value
[info] - get iterator size
[info] - findOldFiles
[info] - resolveURI
[info] - nonLocalPaths
[info] - isBindCollision
[info] SizeTrackerSuite:
[info] - vector fixed size insertions
[info] - vector variable size insertions
[info] - map fixed size insertions
[info] - map variable size insertions
[info] - map updates
[info] ExecutorURLClassLoaderSuite:
[info] - child first
[info] - parent first
[info] - child first can fall back
[info] - child first can fail
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.executor.ExecutorURLClassLoaderSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] ImplicitOrderingSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.ImplicitOrderingSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[error] Uncaught exception when running org.apache.spark.scheduler.DAGSchedulerSuite: java.lang.OutOfMemoryError: unable to create new native thread
sbt.ForkMain$ForkError: unable to create new native thread
at java.lang.Thread.start0(Native Method)
at java.lang.Thread.start(Thread.java:714)
at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
at scala.util.Try$.apply(Try.scala:161)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
at scala.util.Success.flatMap(Try.scala:200)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
at akka.actor.ActorSystemImpl.createScheduler(ActorSystem.scala:618)
at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:541)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:93)
at org.apache.spark.scheduler.DAGSchedulerSuite.<init>(DAGSchedulerSuite.scala:68)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at java.lang.Class.newInstance(Class.java:374)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:621)
at sbt.ForkMain$Run$2.call(ForkMain.java:294)
at sbt.ForkMain$Run$2.call(ForkMain.java:284)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
[info] SecurityManagerSuite:
[info] - set security with conf
[info] - set security with api
[info] - set security modify acls
[info] - set security admin acls
[info] SparkConfSuite:
[info] - loading from system properties
[info] - initializing without loading defaults
[info] - named set methods
[info] - basic get and set
[info] - creating SparkContext without master and app name
[info] - creating SparkContext without master
[info] - creating SparkContext without app name
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.SparkConfSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] SortingSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.rdd.SortingSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] PartitionwiseSampledRDDSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.rdd.PartitionwiseSampledRDDSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] SparkSubmitSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.deploy.SparkSubmitSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at org.apache.spark.deploy.SparkSubmitSuite.testPrematureExit(SparkSubmitSuite.scala:64)
[info]   at org.apache.spark.deploy.SparkSubmitSuite$$anonfun$1.apply$mcV$sp(SparkSubmitSuite.scala:73)
[info]   at org.apache.spark.deploy.SparkSubmitSuite$$anonfun$1.apply(SparkSubmitSuite.scala:73)
[info]   at org.apache.spark.deploy.SparkSubmitSuite$$anonfun$1.apply(SparkSubmitSuite.scala:73)
[info]   at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
[info]   at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
[info]   at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
[info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info]   ...
[info] MetricsSystemSuite:
[info] - MetricsSystem with default config
[info] - MetricsSystem with sources add
[info] PartitionPruningRDDSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.rdd.PartitionPruningRDDSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] FileServerSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.FileServerSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] BlockObjectWriterSuite:
[info] - verify write metrics
[info] - verify write metrics on revert
[info] TaskSetManagerSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.scheduler.TaskSetManagerSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] DoubleRDDSuite:
[info] Exception encountered when attempting to run a suite with class name: org.apache.spark.rdd.DoubleRDDSuite *** ABORTED ***
[info]   java.lang.OutOfMemoryError: unable to create new native thread
[info]   at java.lang.Thread.start0(Native Method)
[info]   at java.lang.Thread.start(Thread.java:714)
[info]   at akka.actor.LightArrayRevolverScheduler.<init>(Scheduler.scala:425)
[info]   at sun.reflect.GeneratedConstructorAccessor9.newInstance(Unknown Source)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
[info]   at scala.util.Try$.apply(Try.scala:161)
[info]   at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
[info]   at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
[info]   ...
[info] PythonRunnerSuite:
[info] - format path
[info] - format paths
[info] FileLoggerSuite:
[info] - Simple logging
[info] - Simple logging with compression
[info] - Logging multiple files
[info] - Logging multiple files with compression
[info] - Logging when directory already exists
[info] ScalaTest
[info] Run completed in 19 minutes, 32 seconds.
[info] Total number of tests run: 574
[info] Suites: completed 81, aborted 22
[info] Tests: succeeded 563, failed 11, canceled 0, ignored 5, pending 0
[info] *** 22 SUITES ABORTED ***
[info] *** 11 TESTS FAILED ***
[error] Error: Total 597, Failed 11, Errors 23, Passed 563, Ignored 5
[error] Failed tests:
[error]     org.apache.spark.deploy.worker.ExecutorRunnerTest
[error]     org.apache.spark.SparkContextSchedulerCreationSuite
[error]     org.apache.spark.DriverSuite
[error]     org.apache.spark.storage.FlatmapIteratorSuite
[error] Error during tests:
[error]     org.apache.spark.scheduler.DAGSchedulerSuite
[error]     org.apache.spark.rdd.RDDSuite
[error]     org.apache.spark.rdd.SortingSuite
[error]     org.apache.spark.executor.ExecutorURLClassLoaderSuite
[error]     org.apache.spark.ImplicitOrderingSuite
[error]     org.apache.spark.scheduler.TaskSetManagerSuite
[error]     org.apache.spark.CacheManagerSuite
[error]     org.apache.spark.SparkConfSuite
[error]     org.apache.spark.rdd.AsyncRDDActionsSuite
[error]     org.apache.spark.JobCancellationSuite
[error]     org.apache.spark.SortShuffleSuite
[error]     org.apache.spark.scheduler.CoarseGrainedSchedulerBackendSuite
[error]     org.apache.spark.AccumulatorSuite
[error]     org.apache.spark.deploy.SparkSubmitSuite
[error]     org.apache.spark.rdd.PartitionwiseSampledRDDSuite
[error]     org.apache.spark.ThreadingSuite
[error]     org.apache.spark.rdd.PartitionPruningRDDSuite
[error]     org.apache.spark.FileServerSuite
[error]     org.apache.spark.deploy.worker.WorkerWatcherSuite
[error]     org.apache.spark.network.ConnectionManagerSuite
[error]     org.apache.spark.rdd.DoubleRDDSuite
[error]     org.apache.spark.scheduler.ReplayListenerSuite
[error]     org.apache.spark.util.collection.ExternalAppendOnlyMapSuite
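
Nearly every abort listed above carries the same "unable to create new native thread" stack, which points at a single environmental limit rather than 22 independent suite bugs. One way to test that, assuming a Linux build host, is to watch the user's live thread count while the forked test JVMs run and compare it with ulimit -u:

# ps -eLf prints one row per thread; count the rows owned by the current user
ps -eLf | awk -v u="$USER" '$1 == u' | wc -l

If that count approaches the ulimit -u value as the run progresses, the aborts are the cap, not the code.
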
[info] ZeroMQStreamSuite: