I hereby claim:
- I am paulfryzel on github.
- I am paulfryzel (https://keybase.io/paulfryzel) on keybase.
- I have a public key whose fingerprint is D94F 8B6B 1A65 73E0 6B98 176D 8E25 9C22 50E6 1BF1
To claim this, I am signing this object:
[info] welcome to sbt 1.3.13 (AdoptOpenJDK Java 11.0.7)
[info] loading global plugins from C:\Users\paul\.sbt\1.0\plugins
[info] loading settings for project sparksql-scalapb-build from plugins.sbt ...
[info] loading project definition from C:\Users\paul\Documents\GitHub\forks\sparksql-scalapb\project
[info] loading settings for project root from build.sbt,sonatype.sbt,version.sbt ...
[info] set current project to root (in build file:/C:/Users/paul/Documents/GitHub/forks/sparksql-scalapb/)
[info] Executing in batch mode. For better performance use sbt's shell
[success] Total time: 0 s, completed Sep 29, 2020, 4:12:35 PM
[info] Compiling 9 Scala sources to C:\Users\paul\Documents\GitHub\forks\sparksql-scalapb\sparksql-scalapb\target\scala-2.12\classes ...
[info] Done compiling.
[info] PersonSpec:
[info] mapping datasets
[info] - should work *** FAILED ***
[info]   org.apache.spark.sql.AnalysisException: Try to map struct<name:string,age:int,tags:array<string>,address:struct<street:string,city:string>,nums:array<int>> to Tuple1, but failed as the number of fields does not line up.;
[info]   at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveDeserializer$.fail(Analyzer.scala:3057)
[info]   at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveDeserializer$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveDeserializer$$validateTopLevelTupleFields(Analyzer.scala:3074)
[info]   at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveDeserializer$$anonfun$apply$31$$anonfun$applyOrElse$172.applyOrElse(Analyzer.scala:3026)
[info]   at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveDeserializer$$anonfun$apply$31$$anonfun$applyOrElse$172.applyOrElse(Analyzer.scala:3018)
[info]   at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:
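The AnalysisException above is Spark's generic complaint when a Dataset deserializer targets a top-level tuple whose arity does not match the number of columns in the underlying schema (the validateTopLevelTupleFields check visible in the stack trace). Below is a minimal, self-contained sketch of that failure mode using plain Spark encoders; the object name, column names, and types are illustrative assumptions, not taken from the PersonSpec test or the ScalaPB-generated encoder.

import org.apache.spark.sql.SparkSession

object TupleArityMismatch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("tuple-arity-mismatch")
      .getOrCreate()
    import spark.implicits._

    // A DataFrame with three columns ...
    val df = Seq(("alice", 30, "nyc")).toDF("name", "age", "city")

    // ... decoded into a single-field Tuple1. Depending on the Spark version,
    // the analyzer rejects this either at the .as[...] call or when the first
    // action runs, with an AnalysisException like:
    //   "Try to map struct<name:string,age:int,city:string> to Tuple1,
    //    but failed as the number of fields does not line up."
    val broken = df.as[Tuple1[String]]
    broken.show()

    spark.stop()
  }
}

In the failing test the tuple presumably comes from the generated encoder rather than user code, but the analyzer check being tripped is the same one shown in the stack trace.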
$ sbt clean compile
[info] welcome to sbt 1.3.13 (Private Build Java 1.8.0_265)
[info] loading global plugins from /home/pfryzel/.sbt/1.0/plugins
[info] loading settings for project sparksql-scalapb-build from plugins.sbt ...
[info] loading project definition from /home/pfryzel/workspace/github/sparksql-scalapb/project
[info] loading settings for project root from version.sbt,build.sbt,sonatype.sbt ...
[info] set current project to root (in build file:/home/pfryzel/workspace/github/sparksql-scalapb/)
[info] Executing in batch mode. For better performance use sbt's shell
[success] Total time: 0 s, completed Sep 3, 2020 2:20:15 PM
[info] Compiling 9 Scala sources to /home/pfryzel/workspace/github/sparksql-scalapb/sparksql-scalapb/target/scala-2.12/classes ...
$ wasm hello-world.wasm
/Users/paulfryzel/.nvm/versions/node/v4.2.0/lib/node_modules/wasm-cli/node_modules/wasm-jit/node_modules/wasm-cfg/lib/wasm-cfg/builder.js:488
      index = entry.index;
                    ^
TypeError: Cannot read property 'index' of undefined
    at CFGBuilder.buildCall (/Users/paulfryzel/.nvm/versions/node/v4.2.0/lib/node_modules/wasm-cli/node_modules/wasm-jit/node_modules/wasm-cfg/lib/wasm-cfg/builder.js:488:18)
    at CFGBuilder.buildExpression (/Users/paulfryzel/.nvm/versions/node/v4.2.0/lib/node_modules/wasm-cli/node_modules/wasm-jit/node_modules/wasm-cfg/lib/wasm-cfg/builder.js:257:17)
    at CFGBuilder.buildStatement (/Users/paulfryzel/.nvm/versions/node/v4.2.0/lib/node_modules/wasm-cli/node_modules/wasm-jit/node_modules/wasm-cfg/lib/wasm-cfg/builder.js:213:15)
    at CFGBuilder.buildBlock (/Users/paulfryzel/.nvm/versions/node/v4.2.0/lib/node_modules/wasm-cli/node_modules/wasm-jit/node_modules/wasm-cfg/lib/wasm-cfg/builder.js:180:10)
language: node_js
sudo: false
env:
  - CXX=g++-4.8
node_js:
  - 4.1
addons:
  apt:
    sources:
      - ubuntu-toolchain-r-test
'use strict';

function foo(a, b, c) {
  return 0;
}

function Obj() {}

Obj.prototype.wrap = function(a, b, c) {
  return foo.call(this, a, b, c);
};
sudo yum install ld-linux.so.2 libstdc++.so.6 libz.so.1 libXext.so.6 libXrender.so.1 libXtst.so.6
sudo setenforce 0
sudo update-alternatives \
  --install /usr/bin/java java /usr/java32/jre1.8.0_31/bin/java 10 \
  --slave /usr/bin/jjs jjs /usr/java32/jre1.8.0_31/bin/jjs \
  --slave /usr/bin/keytool keytool /usr/java32/jre1.8.0_31/bin/keytool \
  --slave /usr/bin/orbd orbd /usr/java32/jre1.8.0_31/bin/orbd \
  --slave /usr/bin/pack200 pack200 /usr/java32/jre1.8.0_31/bin/pack200 \
lstat("tr2.js\0", 0x7FFF582EC468, 0x1) = 0 0 | |
open("tr2.js\0", 0x0, 0x10C3FFBD0) = 3 0 [35/9138] | |
fstat(0x3, 0x7FFF582EC410, 0x10C3FFBD0) = 0 0 | |
read(0x3, " var console = require(\"lib/console\");\n var ffi = require(\"lib/ffi\");\n var std = require(\"lib/stdlib\");\n\n console.log(ffi.c.fork());\n\0", 0x8A) | |
= 137 0 | |
read(0x3, "\313\a\0", 0x1) = 0 0 | |
close(0x3) = 0 0 | |
lstat("lib/stdio.js\0", 0x7FFF582EC2A8, 0x1) = 0 0 | |
open("lib/stdio.js\0", 0x0, 0x10C4539F0) = 3 0 | |
fstat(0x3, 0x7FFF582EC250, 0x10C4539F0) = 0 0 |
(function(file) {
  var ffi = require("lib/ffi");
  var std = require("lib/stdlib");
  var pid = ffi.c.fork();
})('tests/00-runtime/es5-cmp.js');