def format_sequence(sequence: str):
    """Format a DNA sequence as the sequence block of a GenBank record."""
    counter = 1
    print(f"{counter}".rjust(4, " "), end=" ")
    for a, b in zip(range(0, len(sequence) + 1, 10), range(10, len(sequence) + 1, 10)):
        print(sequence[a:b], end="")
        if ((counter % 6) == 0) and (b < (len(sequence) - 1)):
            # After six 10-base blocks, start a new line with its base position.
            print("")
            print(f"{(counter * 10) + 1}".rjust(4, " "), end=" ")
        else:
            # Assumed completion of the truncated snippet: blocks on the same
            # line are separated by a single space.
            print(" ", end="")
        counter += 1
    print("")
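
# Example call with a made-up 80-base sequence (illustration only):
format_sequence("ATGCGTACGATCGATCGTAG" * 4)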

# Graph each core's current clock speed (MHz) in the terminal with termgraph.
cat /proc/cpuinfo | grep MHz | cut -d":" -f2 | tr -d " " | tr "\n" "," | sed 's/[^0-9\.,]//g' | rev | cut -c2- | rev | (echo -n "cpu," && cat) | termgraph

# Configure a local Spark session with the hadoop-aws package for S3 access.
from pyspark.sql import SparkSession
from pyspark import SparkConf

conf = (
    SparkConf()
    .setAppName("Connect AWS")
    .setMaster("local[*]")
)
conf.set("spark.jars.packages", "org.apache.hadoop:hadoop-aws:3.3.2")
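
# Sketch of the likely continuation: build the session from the config above
# and read from S3 via the s3a connector (bucket and path are placeholders).
spark = SparkSession.builder.config(conf=conf).getOrCreate()
df = spark.read.parquet("s3a://my-bucket/some/path/")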

# Extract each ^SAMPLE block (through its sample_table_end marker) from a GEO SOFT family file.
sed -n '/^\^SAMPLE/,/sample_table_end/p' GSE27219_family.soft > out2.txt

# Parse sample records from a GEO SOFT file; `raw` (the file's lines) and
# `arr` (the expected attribute keys) are assumed to be defined earlier.
series = []
d = {}
for row in raw:
    try:
        if len(row) == 0:
            continue
        if row.startswith("!sample_table_end"):
            # At the end of a sample table, wrap each collected value in a list.
            for k in arr:
                if k in d:
                    d[k] = [d[k]]
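            # Assumed continuation of the truncated snippet: store the finished
            # record and start a fresh one for the next sample.
            series.append(d)
            d = {}
    except Exception:
        # Skip malformed rows rather than aborting the parse.
        continue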

import asyncio
from random import randint


async def download(code):
    wait_time = randint(1, 3)
    print('downloading {} will take {} second(s)'.format(code, wait_time))
    await asyncio.sleep(wait_time)  # simulated I/O; control yields to the event loop here
    print('downloaded {}'.format(code))
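
# Assumed driver for the truncated snippet: launch a few downloads
# concurrently and wait for all of them to finish.
async def main():
    await asyncio.gather(*(download(code) for code in range(1, 4)))

asyncio.run(main())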

# Generate a self-signed certificate and an unencrypted 4096-bit RSA key, valid for one year.
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -nodes
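# Optional follow-up: inspect the generated certificate.
openssl x509 -in cert.pem -noout -text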

# Stop Jupyter from soft-wrapping long lines in output cells.
from IPython.display import display, HTML

display(HTML("<style>pre { white-space: pre !important; }</style>"))

import pyspark.sql.functions as F

# Assumes `df` has columns c1, c2, c3. Build a single-element array of a map,
# then flag each map value as 1 (null) or 0 (present).
df.select(
    F.array(
        F.create_map(
            F.lit("k1"), F.col("c1"), F.lit("k2"), F.col("c2"), F.lit("k3"), F.col("c3")
        )
    ).alias("losing_bids")
).select(
    F.transform(
        "losing_bids",
        lambda m: F.transform_values(m, lambda k, v: v.isNull().cast("integer")),
    )
)

import pyspark.sql.functions as F
from pyspark.sql import DataFrame
from pyspark.sql import Window as W
from pyspark.sql.window import WindowSpec

__all__ = ["forward_fill"]


def _window_all_previous_rows(partition, order) -> WindowSpec:
    """Select the window on which values are filled in a forward manner."""