I hereby claim:
- I am conorgriffin on github.
- I am conorgriffin (https://keybase.io/conorgriffin) on keybase.
- I have a public key ASCObXLCZ4WviMwabz9-5lHEfbRyBf9Dy5gH77jhxDHGFAo
To claim this, I am signing this object:
/** | |
* Closes the compressor when garbage is collected. | |
*/ | |
protected void finalize() { | |
end(); | |
} |
/**
 * Writes remaining compressed data to the output stream and closes the
 * underlying stream.
 *
 * <p>NOTE(review): this fragment is truncated — the remainder of the
 * method body (flushing, closing the wrapped stream, setting
 * {@code closed}) is not visible in this chunk.
 *
 * @exception IOException if an I/O error has occurred
 */
public void close() throws IOException { | |
if (!closed) { | |
finish(); | |
// Release the native compressor only when this stream created its own
// default Deflater; a caller-supplied deflater stays open for reuse.
if (usesDefaultDeflater) | |
def.end(); |
/**
 * Creates a new output stream with the specified compressor and
 * buffer size.
 *
 * <p>The new output stream instance is created as if by invoking
 * the 4-argument constructor DeflaterOutputStream(out, def, size, false).
 *
 * @param out the output stream
 * @param def the compressor ("deflater")
 * @param size the output buffer size
/**
 * Creates a new output stream with a default compressor and buffer size.
 *
 * <p>The new output stream instance is created as if by invoking
 * the 2-argument constructor DeflaterOutputStream(out, false).
 *
 * @param out the output stream
 */
public DeflaterOutputStream(OutputStream out) {
    this(out, false);
}
// NOTE(review): orphaned statement from another context — wraps an existing
// stream in a DeflaterOutputStream with a fresh Deflater and a BLOCK_SIZE
// byte buffer; BLOCK_SIZE and outputStream are declared elsewhere (not
// visible in this chunk).
outputStream = new DeflaterOutputStream(outputStream, new Deflater(), BLOCK_SIZE); |
private static native void initIDs(); | |
private native static long init(int level, int strategy, boolean nowrap); | |
private native static void setDictionary(long addr, byte[] b, int off, int len); | |
private native int deflateBytes(long addr, byte[] b, int off, int len, | |
int flush); | |
private native static int getAdler(long addr); | |
private native static void reset(long addr); | |
private native static void end(long addr); |
I hereby claim:
To claim this, I am signing this object:
package spark.example | |
import org.apache.spark.SparkContext | |
import org.apache.spark.SparkContext._ | |
import org.apache.spark.SparkConf | |
// Entry point for a simple Spark job that greps an input file for a term.
// NOTE(review): truncated fragment — the rest of main's body (creating the
// SparkContext and running the grep) is not visible in this chunk.
object SparkGrep { | |
def main(args: Array[String]) { | |
// Require at least <host> <input_file> <match_term>; otherwise print usage.
if (args.length < 3) { | |
System.err.println("Usage: SparkGrep <host> <input_file> <match_term>")
import UIKit | |
/// Table view cell for displaying an ad.
/// Currently a stub: subviews and layout constraints are still to be added.
class AdTableViewCell: UITableViewCell {

    // MARK: Subviews

    // Add subviews and constraints...
}
#!/usr/bin/ruby | |
class IPGenerator | |
public | |
# Builds a generator that simulates `session_count` concurrent sessions,
# each lasting `session_length` requests. Active sessions are tracked in
# the @sessions hash, which starts out empty.
def initialize(session_count, session_length)
  @sessions = {}
  @session_count = session_count
  @session_length = session_length
end