Note: I configured this thinkfan setup for my old ThinkPad W520 on Ubuntu 17.10.

Install lm-sensors and thinkfan:

sudo apt-get install lm-sensors thinkfan
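Before thinkfan can control the fan, the thinkpad_acpi kernel module has to allow fan control; enable it once and reboot (or reload the module):

echo "options thinkpad_acpi fan_control=1" | sudo tee /etc/modprobe.d/thinkfan.conf

Below is a minimal /etc/thinkfan.conf sketch for a ThinkPad of this generation, reading temperatures through the thinkpad_acpi interface. The fan levels and trigger temperatures are illustrative placeholders, not tuned values; adjust them for your own machine:

# fan device and temperature source exposed by thinkpad_acpi
tp_fan /proc/acpi/ibm/fan
tp_thermal /proc/acpi/ibm/thermal

# (level, low-temp, high-temp) -- placeholder thresholds, tune to taste
(0, 0,  55)
(1, 50, 60)
(2, 55, 65)
(4, 60, 70)
(7, 65, 32767)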
import openml
from arbok.bench import Benchmark

# We create a benchmark setup where we specify the headers, the interpreter we
# want to use, the directory where we store the jobs (.sh files), and the
# config file we created earlier.
bench = Benchmark(
    headers="#PBS -lnodes=1:cpu3\n#PBS -lwalltime=15:00:00",
    python_interpreter="/home/jhoof/python/python36/bin/python3",  # path to interpreter
    # the remaining keyword arguments (jobs directory, config file) were
    # truncated in the original snippet
)
Run the benchmark script with the same interpreter:

/home/jhoof/python/python36/bin/python3 bench.py
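For reference, each generated .sh job file starts with the headers given above, followed by an invocation of the configured interpreter. Roughly, it should look like this (a sketch; the exact body arbok generates, and the task identifier placeholder, are assumptions):

#PBS -lnodes=1:cpu3
#PBS -lwalltime=15:00:00

/home/jhoof/python/python36/bin/python3 bench.py <task-id>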
from tqdm import tqdm
import string
import json
import collections
import numpy as np
import os

DATA_PATH = "data"

################################################################################################
from words import words

# Invert the mapping so we can translate in both directions.
minionese_to_english = {v: k for k, v in words.items()}

def translate(sentence, minionese=False):
    dictionary = words if not minionese else minionese_to_english
    result = ""
    for word in sentence.split(" "):
        # Completion of the truncated original: unknown words are assumed
        # to pass through untranslated.
        result += dictionary.get(word, word) + " "
    return result.strip()
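A quick usage sketch, assuming words maps English to Minionese (e.g. "hello" to "bello"):

print(translate("hello"))                   # "bello"
print(translate("bello", minionese=True))   # "hello"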
function randomIndex(list) {
    // Fixed: the original "- 1" could return -1 and never the last index.
    return Math.floor(Math.random() * list.length)
}
function triggerUpdate(el) {
    el.click()
    el.dispatchEvent(new Event('change'))
}
function fillRandom() {
    // Completion of the truncated original (an assumption): randomize every
    // <select> on the page and trigger an update.
    document.querySelectorAll('select').forEach(el => {
        el.selectedIndex = randomIndex(el.options)
        triggerUpdate(el)
    })
}
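These helpers are meant to be pasted into the browser's developer console: calling fillRandom() then picks a random option in every dropdown on the page, and triggerUpdate dispatches a change event so that, presumably, any script listening for changes picks up the new values.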
import pymysql.cursors

# Connect to the local rumor_slayer database; DictCursor returns rows as dicts.
connection = pymysql.connect(host='localhost', user='slayer', password='',
                             db='rumor_slayer', charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)

with connection.cursor() as cursor:
    sql = "SELECT text FROM tweets"
    cursor.execute(sql)
    result = cursor.fetchall()

for tweet in result:
    # Completion of the truncated original (an assumption): print each text.
    print(tweet['text'])