import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessor;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorFactory;
/**
 * Used to create new record processors.
 */
public class MemorycraftKinesisLoggingProcessorFactory implements IRecordProcessorFactory {
    /** Constructor. */
    public MemorycraftKinesisLoggingProcessorFactory() {
        super();
    }

    @Override
    public IRecordProcessor createProcessor() {
        return new MemorycraftKinesisLoggingProcessor(); // processor class assumed; sketched below
    }
}
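The gist omits the processor class that this factory instantiates. Below is a minimal sketch, not from the original source: it assumes a MemorycraftKinesisLoggingProcessor (named to match the factory) that UTF-8-decodes each record and logs it, in the style of the KCL 1.x samples; the checkpointing and error handling are illustrative.

import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessor;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer;
import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownReason;
import com.amazonaws.services.kinesis.model.Record;

public class MemorycraftKinesisLoggingProcessor implements IRecordProcessor {
    private static final Log LOG = LogFactory.getLog(MemorycraftKinesisLoggingProcessor.class);
    private final CharsetDecoder decoder = Charset.forName("UTF-8").newDecoder();
    private String shardId;

    @Override
    public void initialize(String shardId) {
        this.shardId = shardId;
    }

    @Override
    public void processRecords(List<Record> records, IRecordProcessorCheckpointer checkpointer) {
        for (Record record : records) {
            try {
                // Record data arrives as a ByteBuffer; decode it as UTF-8 text and log it.
                String data = decoder.decode(record.getData()).toString();
                LOG.info("shard=" + shardId + " seq=" + record.getSequenceNumber() + " data=" + data);
            } catch (CharacterCodingException e) {
                LOG.error("Skipping record that is not valid UTF-8", e);
            }
        }
        try {
            // Checkpoint after each batch; a real processor might checkpoint less often.
            checkpointer.checkpoint();
        } catch (Exception e) {
            LOG.error("Checkpoint failed", e);
        }
    }

    @Override
    public void shutdown(IRecordProcessorCheckpointer checkpointer, ShutdownReason reason) {
        LOG.info("Shutting down processor for shard " + shardId + ", reason=" + reason);
    }
}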
/** Simple record payload sent through the stream: a user id, a datetime, and two string fields. */
public class Hoge {
    public String user_id;
    public String datetime;
    public String x;
    public String y;
}
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.nio.ByteBuffer;
import java.util.Locale;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.lang.RandomStringUtils;
import net.arnx.jsonic.JSON;
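The import block above (JSONIC, RandomStringUtils, ByteBuffer) suggests a producer that serializes Hoge objects to JSON and puts them onto the stream; the gist does not include its body. Below is a minimal sketch under that assumption; the stream name "hogestream", the class name HogeProducer, and the field values are all illustrative.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import org.apache.commons.lang.RandomStringUtils;
import com.amazonaws.services.kinesis.AmazonKinesisClient;
import com.amazonaws.services.kinesis.model.PutRecordRequest;
import net.arnx.jsonic.JSON;

public class HogeProducer {
    public static void main(String[] args) {
        // Uses the default credentials provider chain and region settings.
        AmazonKinesisClient kinesis = new AmazonKinesisClient();

        // Build a sample event with a random user id.
        Hoge hoge = new Hoge();
        hoge.user_id = RandomStringUtils.randomAlphanumeric(8);
        hoge.datetime = String.valueOf(System.currentTimeMillis());
        hoge.x = "10";
        hoge.y = "20";

        // Serialize to JSON with JSONIC and put the record, keyed by user id.
        PutRecordRequest request = new PutRecordRequest();
        request.setStreamName("hogestream");
        request.setPartitionKey(hoge.user_id);
        request.setData(ByteBuffer.wrap(JSON.encode(hoge).getBytes(StandardCharsets.UTF_8)));
        kinesis.putRecord(request);
    }
}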
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
/*drop table warehouse;
*/
create table warehouse (
  w_id smallint not null,
  w_name varchar(10),
  w_street_1 varchar(20),
  -- remaining columns completed from the standard TPC-C warehouse definition
  w_street_2 varchar(20), w_city varchar(20), w_state char(2), w_zip char(9),
  w_tax decimal(4,2), w_ytd decimal(12,2),
  primary key (w_id)
);
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
CREATE INDEX idx_customer ON customer (c_w_id,c_d_id,c_last,c_first);
CREATE INDEX idx_orders ON orders (o_w_id,o_d_id,o_c_id,o_id);
CREATE INDEX fkey_stock_2 ON stock (s_i_id);
CREATE INDEX fkey_order_line_2 ON order_line (ol_supply_w_id,ol_i_id);
CREATE INDEX fkey_district_1 ON district (d_w_id);
#!/bin/bash
# EMR bootstrap: write per-daemon Hadoop heap sizes (in MB) to hadoop-user-env.sh.
# The first echo truncates the file; the rest append to it.
echo "export HADOOP_JOBTRACKER_HEAPSIZE=512" > /home/hadoop/conf/hadoop-user-env.sh
echo "export HADOOP_NAMENODE_HEAPSIZE=768" >> /home/hadoop/conf/hadoop-user-env.sh
echo "export HADOOP_TASKTRACKER_HEAPSIZE=256" >> /home/hadoop/conf/hadoop-user-env.sh
echo "export HADOOP_DATANODE_HEAPSIZE=128" >> /home/hadoop/conf/hadoop-user-env.sh
<?php
// Load the phpcassa library
require_once __DIR__."/lib/autoload.php";
// Use a connection pool
use phpcassa\Connection\ConnectionPool;
$pool = new ConnectionPool("Hogebook", array("10.0.1.10"));
// Get a connection from the pool
$raw = $pool->get();