Created July 16, 2014 20:19
-
-
Save nathangoulding/8603690b44234ffa85b7 to your computer and use it in GitHub Desktop.
Pig partition key
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
ubuntu@ip-172-31-28-116:~$ dse pig
2014-07-16 20:00:15,574 [main] INFO org.apache.pig.Main - Apache Pig version 0.10.1 (r1426282) compiled Dec 27 2012, 11:24:26
2014-07-16 20:00:15,575 [main] INFO org.apache.pig.Main - Logging error messages to: /home/ubuntu/pig_1405540815569.log
2014-07-16 20:00:15,954 [main] INFO org.apache.pig.backend.hadoop.executionengine.HExecutionEngine - Connecting to hadoop file system at: cfs://172.31.28.116/
2014-07-16 20:00:16,870 [main] INFO com.datastax.bdp.snitch.Workload - Setting my workload to Cassandra
2014-07-16 20:00:18,245 [main] INFO org.apache.pig.backend.hadoop.executionengine.HExecutionEngine - Connecting to map-reduce job tracker at: 172.31.31.145:8012
grunt> pageviews = LOAD 'cql://pageviews/pageviews_v4_mzcztspeog?page_size=5000&where_clause=d%3D20140702' USING CqlStorage();
grunt> illustrate pageviews;
2014-07-16 20:01:02,808 [main] INFO org.apache.pig.backend.hadoop.executionengine.HExecutionEngine - Connecting to hadoop file system at: cfs://172.31.28.116/
2014-07-16 20:01:02,809 [main] INFO org.apache.pig.backend.hadoop.executionengine.HExecutionEngine - Connecting to map-reduce job tracker at: 172.31.31.145:8012
2014-07-16 20:01:03,032 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler - File concatenation threshold: 100 optimistic? false
2014-07-16 20:01:03,056 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MultiQueryOptimizer - MR plan size before optimization: 1
2014-07-16 20:01:03,056 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MultiQueryOptimizer - MR plan size after optimization: 1
2014-07-16 20:01:03,072 [main] INFO org.apache.pig.tools.pigstats.ScriptState - Pig script settings are added to the job
2014-07-16 20:01:03,086 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler - mapred.job.reduce.markreset.buffer.percent is not set, set to default 0.3
java.lang.RuntimeException
	at org.apache.cassandra.hadoop.cql3.CqlPagingRecordReader$RowIterator.executeQuery(CqlPagingRecordReader.java:662)
	at org.apache.cassandra.hadoop.cql3.CqlPagingRecordReader$RowIterator.<init>(CqlPagingRecordReader.java:298)
	at org.apache.cassandra.hadoop.cql3.CqlPagingRecordReader.initialize(CqlPagingRecordReader.java:164)
	at org.apache.pig.impl.io.ReadToEndLoader.initializeReader(ReadToEndLoader.java:174)
	at org.apache.pig.impl.io.ReadToEndLoader.getNextHelper(ReadToEndLoader.java:209)
	at org.apache.pig.impl.io.ReadToEndLoader.getNext(ReadToEndLoader.java:190)
	at org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad.getNext(POLoad.java:131)
	at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapBase.runPipeline(PigGenericMapBase.java:271)
	at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapBase.map(PigGenericMapBase.java:266)
	at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapBase.map(PigGenericMapBase.java:64)
	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:144)
	at org.apache.pig.pen.LocalMapReduceSimulator.launchPig(LocalMapReduceSimulator.java:194)
	at org.apache.pig.pen.ExampleGenerator.getData(ExampleGenerator.java:257)
	at org.apache.pig.pen.ExampleGenerator.readBaseData(ExampleGenerator.java:222)
	at org.apache.pig.pen.ExampleGenerator.getExamples(ExampleGenerator.java:154)
	at org.apache.pig.PigServer.getExamples(PigServer.java:1210)
	at org.apache.pig.tools.grunt.GruntParser.processIllustrate(GruntParser.java:726)
	at org.apache.pig.tools.pigscript.parser.PigScriptParser.Illustrate(PigScriptParser.java:591)
	at org.apache.pig.tools.pigscript.parser.PigScriptParser.parse(PigScriptParser.java:306)
	at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:190)
	at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:166)
	at org.apache.pig.tools.grunt.Grunt.run(Grunt.java:69)
	at org.apache.pig.Main.run(Main.java:490)
	at org.apache.pig.Main.main(Main.java:111)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.util.RunJar.main(RunJar.java:156)
Caused by: InvalidRequestException(why:d cannot be restricted by more than one relation if it includes an Equal)
	at org.apache.cassandra.thrift.Cassandra$prepare_cql3_query_result$prepare_cql3_query_resultStandardScheme.read(Cassandra.java:51017)
	at org.apache.cassandra.thrift.Cassandra$prepare_cql3_query_result$prepare_cql3_query_resultStandardScheme.read(Cassandra.java:50994)
	at org.apache.cassandra.thrift.Cassandra$prepare_cql3_query_result.read(Cassandra.java:50933)
	at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:78)
	at org.apache.cassandra.thrift.Cassandra$Client.recv_prepare_cql3_query(Cassandra.java:1756)
	at org.apache.cassandra.thrift.Cassandra$Client.prepare_cql3_query(Cassandra.java:1742)
	at org.apache.cassandra.hadoop.cql3.CqlPagingRecordReader$RowIterator.prepareQuery(CqlPagingRecordReader.java:602)
	at org.apache.cassandra.hadoop.cql3.CqlPagingRecordReader$RowIterator.executeQuery(CqlPagingRecordReader.java:632)
	... 28 more
2014-07-16 20:01:04,388 [main] ERROR org.apache.pig.tools.grunt.Grunt - ERROR 2997: Encountered IOException. Exception : null
Details at logfile: /home/ubuntu/pig_1405540815569.log
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.