Here are some helpful commands for managing ZFS and zpool on Ubuntu.
Useful for populating /etc/zfs/vdev_id.conf:
printDisks() {
    for i in /dev/sd[b-i]; do
        fdisk -l "$i"
    done
}
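
The serials that fdisk prints can then be mapped to stable names. A minimal sketch of vdev_id.conf entries (the alias names and by-id paths are made-up placeholders, not real devices):

# /etc/zfs/vdev_id.conf -- hypothetical example entries
alias d1 /dev/disk/by-id/ata-EXAMPLE_SERIAL_1
alias d2 /dev/disk/by-id/ata-EXAMPLE_SERIAL_2

After editing the file, udevadm trigger repopulates /dev/disk/by-vdev with the new aliases.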

def df_to_sql_fast(df, table_name, numeric_columns, date_columns, append_or_replace, conn):
    """
    Appends or overwrites a SQL Server table
    using data from a Pandas DataFrame.
    Submits df records at once for faster performance
    compared to df_to_sql.

    Parameters:
        df (DataFrame): df used to create/append table
        table_name (str): Name of existing SQL Server table
        numeric_columns (list): Columns to load as numeric values
        date_columns (list): Columns to load as dates
        append_or_replace (str): Whether to append to or replace the table
        conn: Open database connection
    """
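    # The original body is truncated here. A hedged sketch of a typical
    # implementation follows -- not the author's code. It assumes `conn` is a
    # pyodbc connection (whose cursors support fast_executemany) and skips the
    # type coercion that numeric_columns / date_columns presumably drive.
    cursor = conn.cursor()
    cursor.fast_executemany = True  # send all rows in a single batch
    if append_or_replace == "replace":
        cursor.execute(f"DELETE FROM {table_name}")
    placeholders = ", ".join(["?"] * len(df.columns))
    cursor.executemany(
        f"INSERT INTO {table_name} VALUES ({placeholders})",
        df.values.tolist(),
    )
    conn.commit()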

-- DuckDB: hash an entire row at once by casting it to TEXT
SELECT tbl::TEXT, HASH(tbl::TEXT), MD5(tbl::TEXT) FROM tbl;

D create table tbl as (select 1 as a, 2 as b, 3 as c);
D select tbl::text, hash(tbl::text), md5(tbl::text) from tbl;
┌──────────────────────────┬────────────────────────────┬──────────────────────────────────┐
│   CAST(tbl AS VARCHAR)   │ hash(CAST(tbl AS VARCHAR)) │     md5(CAST(tbl AS VARCHAR))    │
│         varchar          │           uint64           │             varchar              │
├──────────────────────────┼────────────────────────────┼──────────────────────────────────┤
│ {'a': 1, 'b': 2, 'c': 3} │        6764392534128998287 │ e31681d6e7ab078c9679fcd4f50136eb │
└──────────────────────────┴────────────────────────────┴──────────────────────────────────┘
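
The same cast trick extends to fingerprinting a whole table for change detection; a sketch, assuming DuckDB's string_agg with an ORDER BY clause so the result is stable regardless of row order:

SELECT md5(string_agg(tbl::TEXT, '|' ORDER BY tbl::TEXT)) FROM tbl;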

Windows Registry Editor Version 5.00
; Register Notepad++ under App Paths so the shell can resolve npp.exe

[HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\npp.exe]
@="C:\\Program Files (x86)\\Notepad++\\notepad++.exe"

# View diffs using delta
# Via https://github.com/jonas/tig/issues/26#issuecomment-1923835137
bind diff D >sh -c "git show %(commit) | delta --paging always"
bind diff S >sh -c "git show %(commit) | delta --paging always --side-by-side"
bind stage D >sh -c "git diff HEAD -- %(file) | delta --paging always"
bind stage S >sh -c "git diff HEAD -- %(file) | delta --paging always --side-by-side"
bind status D >sh -c "git diff HEAD -- %(file) | delta --paging always"
bind status S >sh -c "git diff HEAD -- %(file) | delta --paging always --side-by-side"
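
These bindings belong in ~/.tigrc (or ~/.config/tig/config) and assume the delta pager is on PATH; one way to get it, for example:

brew install git-delta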

# tmux status bar: client tty, Mountain and UTC clocks, local date/time
set-option -g status-right '#{client_tty} (#(TZ=US/Mountain date +%%H:%%M)MT #(TZ=UTC date +%%H:%%M)Z) %Y-%m-%d %H:%M'
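
The doubled %% keeps tmux from expanding the strftime codes itself before the shell command inside #() runs. To apply the change (assuming the line lives in ~/.tmux.conf):

tmux source-file ~/.tmux.conf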

# To configure for MinGW instead of nmake
#
#   C:\TDM-GCC-64\mingwvars.bat
#   cmake . -G "MinGW Makefiles"
#
# Makefile: cmake ... -DCMAKE_TOOLCHAIN_FILE=TDM.cmake
set(CMAKE_SYSTEM_NAME Windows)
set(CMAKE_C_COMPILER C:/TDM-GCC-64/bin/gcc.exe)
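
If the project builds C++ as well, the matching compiler can be added to the toolchain file; the path below assumes the default TDM-GCC layout and is not from the original:

set(CMAKE_CXX_COMPILER C:/TDM-GCC-64/bin/g++.exe)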

[cmdletbinding()]
Param(
    [Parameter(Mandatory=$true)]
    [string[]]$servers,
    [ValidateScript({
        if(-Not ($_ | Test-Path)) {
            throw "Folder does not exist"
        }
        return $true
    })]
    [string]$folder  # hypothetical name; the original is cut off after the validation block
)
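
A hypothetical invocation, using the made-up script and parameter names from the completion above:

.\Get-ServerLogs.ps1 -servers web01,web02 -folder C:\Logs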

# The Thrift server is not finding the Delta jars in the Ivy2 cache, even when
# spark.sql.catalog.spark_catalog is specified in spark-defaults.conf.
# Probably a bad solution...
wget https://repo1.maven.org/maven2/io/delta/delta-core_2.12/2.4.0/delta-core_2.12-2.4.0.jar && \
    mv delta-core_2.12-2.4.0.jar jars/
wget https://repo1.maven.org/maven2/io/delta/delta-storage/2.4.0/delta-storage-2.4.0.jar && \
    mv delta-storage-2.4.0.jar jars/
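
An alternative worth trying before copying jars by hand: Spark's standard spark.jars.packages key pulls the same artifacts from Maven at startup (coordinates taken from the URLs above):

# in spark-defaults.conf
spark.jars.packages io.delta:delta-core_2.12:2.4.0,io.delta:delta-storage:2.4.0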

#!/bin/bash
# build/build-spark-pip.sh
# https://spark.apache.org/docs/3.4.1/building-spark.html
export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g"
#./build/mvn -DskipTests clean package
pushd ..
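# The script is cut off above. Per the linked build docs, a pip-installable
# distribution is typically produced with make-distribution.sh; the flags here
# are an example, not necessarily the original script's:
./dev/make-distribution.sh --name custom-spark --pip --tgz -Phive -Phive-thriftserver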