Used to investigate a segfault in a C extension within a Python package.

Install a debug build of Python using pyenv, then check that it is available for use:
pyenv install -g 3.8.5
pyenv versions
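
To get a backtrace, run the crashing code under gdb using the debug interpreter. A sketch: 'my_ext' is a hypothetical module that triggers the crash, and pyenv is assumed to expose the debug build as 3.8.5-debug:
pyenv shell 3.8.5-debug
gdb --args "$(pyenv which python)" -c 'import my_ext'
# at the gdb prompt: 'run', then 'bt' after the segfault for a backtrace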

import os
from typing import Dict
import numpy as np

def export_data(data: Dict[str, np.ndarray],
                filename: str,
                cols_per_line: int = 8) -> None:
    """Export dictionary of 1D float arrays to a file."""
    # Assumed minimal body: write each array's name, then its values,
    # cols_per_line values per line
    with open(filename, 'w') as f:
        for name, arr in data.items():
            f.write(name + '\n')
            for i in range(0, arr.size, cols_per_line):
                f.write(' '.join('{:g}'.format(x) for x in arr[i:i + cols_per_line]) + '\n')

#!/usr/bin/tclsh
set sge_root $env(SGE_ROOT)
source "$sge_root/util/resources/jsv/jsv_include.tcl"
proc jsv_on_start {} {
    jsv_send_env
}
# Return 1 if a file exists, without following symlinks
# (assumed completion of this truncated helper)
proc lexists name {
    expr {![catch {file lstat $name finfo}]}
}

import tempfile
import os
import time
import paramiko

# Hostname of a 'submission host' in the Grid Engine cluster we want to submit a job to
hostname = 'sharc.shef.ac.uk'
# Username for logging in to the cluster via SSH
username = "te1st"

# Assumed continuation (key-based auth; 'job.sge' is a hypothetical job
# script already present on the cluster):
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(hostname, username=username)
_, stdout, _ = client.exec_command('qsub job.sge')
print(stdout.read().decode())
client.close()

#!/bin/bash
# Will Furnass
# Oct 2017
# Extract list of user email addresses from a Synology DSS '.dss' config dump
if [[ $# -lt 1 ]]; then
    echo 1>&2 "usage: $0 <path-to-dss-file>"
    exit 1
fi
dss_path="$1"
# Assumed approach: email addresses appear as plain text in the dump,
# so grep for them and de-duplicate
grep -Eo '[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}' "$dss_path" | sort -u
Initialise a git repository:
cd somedir
git init
Install git subrepo if you haven't done so already:
https://github.com/ingydotnet/git-subrepo#installation
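
Then add a remote repository as a subdirectory (a sketch; the URL and path here are placeholders):
git subrepo clone https://github.com/someuser/somelib.git vendor/somelib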

The following works with Son of Grid Engine (SGE) 8.1.9 as configured on the University of Sheffield's ShARC and Iceberg clusters.
You can use the -hold_jid <job-id or job-name> option to make a job start
only when the named jobs have finished, rather than having it start
immediately and sit waiting for other tasks to complete.
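For example (a sketch; the job script names are hypothetical):
qsub -N prep prep.sge
qsub -N analyse -hold_jid prep analyse.sge   # won't start until 'prep' has finished
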
Default prefix: <ctrl>b
tmux new -s session_name
- creates a new tmux session named session_name
tmux a
- attaches to the first available session
tmux attach -t session_name
- attaches to an existing tmux session named session_name
tmux switch -t session_name
- switches to an existing session named session_name
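
Example workflow (the session name 'analysis' is arbitrary):
tmux new -s analysis
- do some work, then press <ctrl>b d to detach
tmux ls
- lists existing sessions
tmux attach -t analysis
- reattaches later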

#!/bin/sh
# David Jones and Will Furnass (University of Sheffield)
# `qrsh` is often the most convenient utility for starting interactive shell
# sessions on Sun/Son of Grid Engine (SGE) clusters but it has some limitations (as
# configured on the ShARC/Iceberg clusters):
#
# - none of the standard SGE environment vars (e.g. JOB_ID, PE_HOSTFILE,
#   NSLOTS) that are defined in qsub/qsh batch/interactive sessions are set
#   (annoying but not the end of the world);

#!/usr/bin/env python
from __future__ import print_function
from ruffus import transform, suffix, pipeline_run

# A test of using Ruffus to locally run a very simple pipeline comprised of
# tasks defined as Python functions
# Can run this from a conda environment created using
#   conda create -n drmaatest -c bioconda python=2.7 ruffus drmaa

# Assumed minimal task: copy 'input.txt' (which must already exist) to 'input.out'
@transform(['input.txt'], suffix('.txt'), '.out')
def copy_task(in_path, out_path):
    with open(in_path) as fin, open(out_path, 'w') as fout:
        fout.write(fin.read())

pipeline_run([copy_task])