@tbatchelli
Created March 9, 2011 21:22
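# Render the pallet-generated hadoop-site.xml into a staging file under /tmp.
# The heredoc delimiter is quoted so ${user.name} reaches the file literally,
# leaving that expansion to Hadoop's own configuration variable substitution.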
cat > /tmp/hadoop-site.xml.new <<'EOFpallet'
<?xml version="1.0" encoding="UTF-8"?><configuration><property><name>dfs.datanode.du.reserved</name><value>1073741824</value><final>true</final></property><property><name>mapred.child.ulimit</name><value>1126400</value><final>true</final></property><property><name>mapred.local.dir</name><value>/tmp/hadoop//hadoop/hdfs/mapred/local</value><final>true</final></property><property><name>dfs.name.dir</name><value>/tmp/hadoop//hadoop/hdfs/name</value><final>true</final></property><property><name>dfs.permissions</name><value>true</value><final>true</final></property><property><name>io.file.buffer.size</name><value>65536</value></property><property><name>mapred.child.java.opts</name><value>-Xmx550m</value></property><property><name>ndfs.block.size</name><value>134217728</value></property><property><name>dfs.replication</name><value></value></property><property><name>dfs.data.dir</name><value>/tmp/hadoop//hadoop/hdfs/data</value><final>true</final></property><property><name>tasktracker.http.threads</name><value>46</value><final>true</final></property><property><name>hadoop.rpc.socket.factory.class.JobSubmissionProtocol</name><value></value><final>true</final></property><property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value><final>true</final></property><property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property><property><name>mapred.reduce.parallel.copies</name><value>10</value></property><property><name>mapred.output.compression.type</name><value>BLOCK</value></property><property><name>hadoop.tmp.dir</name><value>/tmp/hadoop//tmp/hadoop-${user.name}</value><final>true</final></property><property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec</value></property><property><name>fs.checkpoint.dir</name><value>/tmp/hadoop//hadoop/hdfs/secondary</value><final>true</final></property><property><name>hadoop.rpc.socket.factory.class.ClientProtocol</name><value></value><final>true</final></property><property><name>dfs.datanode.handler.count</name><value>3</value><final>true</final></property><property><name>mapred.reduce.tasks</name><value>10</value></property><property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>1</value><final>true</final></property><property><name>mapred.job.tracker.handler.count</name><value>5</value><final>true</final></property><property><name>mapred.job.tracker</name><value>job-tracker-name:8021</value></property><property><name>mapred.reduce.tasks.speculative.execution</name><value>false</value></property><property><name>mapred.submit.replication</name><value>10</value></property><property><name>fs.trash.interval</name><value>1440</value><final>true</final></property><property><name>mapred.compress.map.output</name><value>true</value></property><property><name>mapred.system.dir</name><value>/tmp/hadoop//hadoop/hdfs/system/mapred</value></property><property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value><final>true</final></property><property><name>dfs.namenode.handler.count</name><value>5</value><final>true</final></property><property><name>fs.default.name</name><value>hdfs://name-node-name:8020/</value></property></configuration>
EOFpallet
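
The rendered file lands in /tmp rather than in the Hadoop configuration directory itself, which suggests it is staged before being swapped into place. A minimal follow-up sketch of that swap, assuming the live config sits at /etc/hadoop/hadoop-site.xml (the path, the diff check, and the backup step are assumptions, not part of the original gist):

# Install the staged config only if it differs from the live one.
# /etc/hadoop/hadoop-site.xml is an assumed location; adjust for your layout.
if ! diff -q /tmp/hadoop-site.xml.new /etc/hadoop/hadoop-site.xml >/dev/null 2>&1; then
    cp /etc/hadoop/hadoop-site.xml /etc/hadoop/hadoop-site.xml.bak 2>/dev/null || true
    mv /tmp/hadoop-site.xml.new /etc/hadoop/hadoop-site.xml
fi

Comparing before moving keeps the step idempotent, so re-running provisioning does not churn the file or force unnecessary daemon restarts.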