@jrson83
Forked from TClark1011/Action.ts
Created March 2, 2023 23:21
TS - Helpful Utilities
// branded types to allow for better type inference
// with default generic types
/* eslint-disable @typescript-eslint/naming-convention */
type NO_PAYLOAD = {
JgJES6BF8uyaOwF1: "FY7eBhPYJlqOxuVp";
};
type OPTIONAL_PAYLOAD = {
A7nWdXs0r5RLuHRf: "zPcrRNRIl4r5IHbA";
};
/* eslint-enable @typescript-eslint/naming-convention */
// Wrap your payload type in this if you want it to be optional
export type OptionalPayload<Payload> = Payload & OPTIONAL_PAYLOAD;
type ExtractOptionalPayloadType<Payload> = Payload extends OptionalPayload<
infer Type
>
? Type
: Payload;
// If only type is provided, there will be no payload field.
// Wrap payload with `OptionalPayload` generic type to make
// it optional
export type Action<
Type extends string,
Payload = NO_PAYLOAD,
> = Payload extends NO_PAYLOAD
? {
type: Type;
}
: Payload extends OPTIONAL_PAYLOAD
? {
type: Type;
payload?: ExtractOptionalPayloadType<Payload>;
}
: Required<{
type: Type;
payload: ExtractOptionalPayloadType<Payload>;
}>;
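// Example usage (illustrative): `reset` takes no payload, `setUser`'s payload
// is optional, and `setCount` requires one.
type ExampleAction =
  | Action<"reset">
  | Action<"setUser", OptionalPayload<{ name: string }>>
  | Action<"setCount", number>;
const setCountAction: ExampleAction = { type: "setCount", payload: 1 };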
type EventListenerAdderWithRemover = <
EventName extends keyof (HTMLElementEventMap | DocumentEventMap),
>(
element: HTMLElement | Document | undefined,
type: EventName,
callback: (e: (HTMLElementEventMap | DocumentEventMap)[EventName]) => void,
options?: AddEventListenerOptions | boolean,
) => () => void;
// Adds an event listener to an element and returns a callback
// to remove that event listener
export const addEventListenerWithRemover: EventListenerAdderWithRemover = (
element,
eventName,
callback,
options,
) => {
element?.addEventListener(eventName, callback as never, options);
return () =>
element?.removeEventListener(eventName, callback as never, options);
};
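// Example usage (illustrative): register a listener and clean it up later.
const removeKeydownListener = addEventListenerWithRemover(
  document,
  "keydown",
  (event) => console.log(event.key),
);
// ...later, e.g. in a component's cleanup:
removeKeydownListener();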
export const getArrayMaxIndex = (arr: unknown[]): number => arr.length - 1;
export const getMiddleIndexOfArray = (arr: unknown[]): number =>
Math.floor(getArrayMaxIndex(arr) / 2);
export const getMiddleItemFromArray = <Item>(arr: Item[]): Item | undefined => {
const middleIndex = getMiddleIndexOfArray(arr);
if (middleIndex < 0) return undefined;
const middleItem = arr[middleIndex];
return middleItem;
};
type Comparator<Item> = (a: Item, b: Item) => boolean;
const shallowEqual: Comparator<unknown> = (a, b) => a === b;
export const toggleArrayElement = <Item>(
arr: Item[],
target: Item,
compare: Comparator<Item> = shallowEqual
): Item[] => {
const isEqualToTarget = (i: Item) => compare(i, target);
const existingItem = arr.find(isEqualToTarget);
if (!existingItem) return [...arr, target];
const withoutTarget = arr.filter((item) => !isEqualToTarget(item));
return withoutTarget;
};
const randomizedComparison = () => {
const floatBetweenZeroAndTwo = Math.random() * 2;
const integerBetweenZeroAndTwo = Math.round(floatBetweenZeroAndTwo);
const integerBetweenNegativeOneAndOne = integerBetweenZeroAndTwo - 1;
return integerBetweenNegativeOneAndOne;
};
export const shuffleArray = <Item>(arr: Item[]): Item[] =>
[...arr].sort(randomizedComparison);
export const getRandomArrayItem = <Item>(arr: Item[]): Item | undefined => {
const shuffled = shuffleArray(arr);
const randomItem = shuffled[0];
return randomItem;
};
// Takes a number and wraps it around if it goes beneath
// a specified minimum or over a specified maximum.
const clampWithWrap = (value: number, min: number, max: number): number => {
const howFarOverMax = value - max;
const howFarUnderMin = min - value;
if (value > max) {
return clampWithWrap(min + howFarOverMax - 1, min, max);
}
if (value < min) {
return clampWithWrap(max - howFarUnderMin + 1, min, max);
}
return value;
};
export default clampWithWrap;
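// Example usage (illustrative): wrap an index around a 0-4 range.
clampWithWrap(5, 0, 4); // => 0
clampWithWrap(-1, 0, 4); // => 4
clampWithWrap(3, 0, 4); // => 3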

CLI Caching

To set up simple caching in a CLI app, use the packages flat-cache and find-cache-dir.

// Imports assumed by this snippet (exact import style depends on your module config)
import findCacheDir from "find-cache-dir";
import * as cacheManager from "flat-cache";

const APP_NAME = "your-app-name-here";
const cacheDirectory = findCacheDir({ name: APP_NAME });
if (!cacheDirectory) throw new Error("Cache directory not found");

const cache = cacheManager.load(APP_NAME, cacheDirectory);

cache.get("your-key-here");
cache.set("your-key-here", "your-value-here");

cache.save();
const coerceIntoArray = <Item>(val: Item | Item[]): Item[] =>
Array.isArray(val) ? val : [val];
export default coerceIntoArray;
/**
* Allows you to apply color to a message within `console.log`
*
* The below example will log the message "Hello World" in
* green.
*
* @example
* console.log(...colorConsoleMessage('Hello World', 'green'))
*/
const colorConsoleMessage = (
message: string,
color: string
): [string, string] => [`%c${message}`, `color: ${color};`];
export default colorConsoleMessage;
const composeDummyImageSrc = (width: number, height = width) =>
`https://picsum.photos/${width}/${height}?key=${Math.random()}`;
export default composeDummyImageSrc;
const composeQuantityStatement = (
items: any[] | readonly any[] | number,
label: string,
pluralLabel?: string
) => {
const quantity = Array.isArray(items) ? items.length : items;
const finalPluralLabel = pluralLabel ?? `${label}s`;
const finalLabel = quantity === 1 ? label : finalPluralLabel;
return `${quantity} ${finalLabel}`;
};
export default composeQuantityStatement;
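// Example usage (illustrative):
composeQuantityStatement(1, "file"); // => "1 file"
composeQuantityStatement(["a", "b"], "file"); // => "2 files"
composeQuantityStatement(3, "box", "boxes"); // => "3 boxes"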
// Can be used to add a prop to an object you are
// defining if the value for that prop is not falsy
const conditionalProp = <Value, Key extends string>(
value: Value,
key: Key,
): Record<Key, Value> | {} => (value ? { [key]: value } : {});
// Example usage (assumes a `folderFiles` array is in scope):
const folder = {
name: "Documents",
files: folderFiles,
...conditionalProp(folderFiles.length, "fileCount"),
// "fileCount" will be added to the object with
// a value equal to `folderFiles.length` only if
// `folderFiles.length` is greater than 0
};
export default conditionalProp;
//Converts a css style object (like those seen in
// React inline styles) to a css string.
//
// NOTE: this does not convert multi-word properties
// to kebab case, you will need to do that yourself
// if your object has multi-word properties.
const convertStyleObjectToString = (
obj: Record<string, string | number>,
): string =>
Object.entries(obj)
.map(([key, value]) => `${key}: ${value}`)
.join(";");
export default convertStyleObjectToString;
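// Example usage (illustrative; note the manually kebab-cased key):
convertStyleObjectToString({ color: "red", "font-size": "12px" });
// => "color: red;font-size: 12px"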
import { MouseEvent, MouseEventHandler } from "react";
type Coordinates = {
x: number;
y: number;
};
const deepCopy = <T>(data: T): T => JSON.parse(JSON.stringify(data));
export const fireClickRipple = (clickEvent: MouseEvent) => {
const element = clickEvent.currentTarget as HTMLElement;
const rect = element.getBoundingClientRect();
const cursorCoordinates: Coordinates = {
x: clickEvent.clientX,
y: clickEvent.clientY,
};
const elementCoordinates: Coordinates = {
x: rect.left,
y: rect.top,
};
const coordinatesRelativeToElement = {
x: cursorCoordinates.x - elementCoordinates.x,
y: cursorCoordinates.y - elementCoordinates.y,
};
const currentPositionValue = window.getComputedStyle(element).position;
const originalElementStyle = deepCopy(element.style);
if (currentPositionValue === "static" || currentPositionValue === "") {
// If the element is static, we need to set it to relative
element.style.position = "relative";
}
element.style.overflow = "hidden";
const rippleDiv = document.createElement("div");
rippleDiv.style.position = "absolute";
const dimensionMultiplier = 2;
const width = dimensionMultiplier * rect.width;
const height = dimensionMultiplier * rect.height;
const finalDimension = Math.max(height, width);
// rippleDiv.style.left = `${coordinatesRelativeToElement.x - width / 2}px`;
rippleDiv.style.left = `${
coordinatesRelativeToElement.x - finalDimension / 2
}px`;
rippleDiv.style.top = `${
coordinatesRelativeToElement.y - finalDimension / 2
}px`;
rippleDiv.style.width = `${finalDimension}px`;
rippleDiv.style.height = `${finalDimension}px`;
rippleDiv.style.borderRadius = "50%";
rippleDiv.style.background = "black";
element.appendChild(rippleDiv);
const duration = 500;
rippleDiv.animate(
[
{
transform: "scale(0) ",
opacity: 0.5,
},
{
transform: "scale(1)",
offset: 0.7,
},
{
opacity: 0,
offset: 1,
},
],
{
duration,
easing: "linear",
},
);
setTimeout(() => {
rippleDiv.remove();
element.style.overflow = originalElementStyle.overflow;
element.style.position = originalElementStyle.position;
}, duration);
};
export const withRipple =
(extraHandler: MouseEventHandler | undefined): MouseEventHandler =>
(event) => {
fireClickRipple(event);
extraHandler?.(event);
};
/* eslint-disable @typescript-eslint/naming-convention */
type INTERNAL_Opaque<BaseType, Token extends string> = BaseType & {
____________________: Token;
};
type INTERNAL_PseudoSymbol<Token extends string> = INTERNAL_Opaque<
Token,
Token
>;
export type Page = INTERNAL_PseudoSymbol<'page'>;
// export type Route = RouteEnd | Record<string, Route>;
type SAMPLE_RouteMap = {
// $ `Page` means it's a valid route
about: Page;
// $ `Page & {...}` means it's a valid route, and it has
// $ children
posts: Page & {
'[slug]': Page;
new: Page;
};
// $ `auth` does not use `Page`, meaning that `/auth` on
// $ its own is not a valid route.
auth: {
// $ `auth` children use `Page`, so while `/auth` is
// $ not a valid route, `/auth/signIn` and
// $ `/auth/signUp` are valid routes
signIn: Page;
signUp: Page;
};
profile: Page & {
'[slug]': Page & {
edit: Page;
};
};
};
// What a route path type generated from the above type should look like
type SAMPLE_RouteMap_Result =
| '/'
| '/about'
| '/posts'
| '/posts/[slug]'
| '/posts/new'
| '/auth/signIn'
| '/auth/signUp'
| '/profile'
| '/profile/[slug]'
| '/profile/[slug]/edit';
type IsPage<T> = T extends Page ? true : false;
type A = IsPage<SAMPLE_RouteMap['about']>
type B = IsPage<SAMPLE_RouteMap['posts']>
type C = IsPage<SAMPLE_RouteMap['auth']>

ESLint Setup for Typescript

Get Started

yarn create @eslint/config

Select the following options:

  • To check syntax and find problems
  • Make sure Node is checked for the environment prompt

Custom Alterations

Add the following to the rules of the ESLint config:

{
	rules: {
		quotes: [
			"error",
			"single",
      {
        avoidEscape: true,
      },
		],
		'@typescript-eslint/no-explicit-any': 'off',
	}
}

Prettier

Run the following:

yarn add --dev eslint-plugin-prettier prettier

Create a .prettierrc file in the project root:

{
	"trailingComma": "es5",
	"tabWidth": 4,
	"semi": true,
	"singleQuote": true
}

Add the following to your config:

{
	plugins: ['prettier'],
	rules: {
		'prettier/prettier': 'error'
	}
}

Set ESLint as your default VSCode formatter.

Add the following to your package.json

{
	"scripts": {
		"lint": "eslint --fix",
		"format": "prettier --write ./src"
	}
}

JSDocs

I like to have ESLint require JSDoc comments for most complex variables (and Typescript types) that are exported from their file.

Install:

yarn add --dev eslint-plugin-jsdoc

Config:

{
	extends: ["plugin:jsdoc/recommended"],
	plugins: ["jsdoc"],
	rules: {
	 "jsdoc/require-jsdoc": [
			"error",
			{
				publicOnly: true,
				require: {
					ArrowFunctionExpression: true,
					ClassDeclaration: true,
					ClassExpression: true,
					FunctionDeclaration: true,
					FunctionExpression: true,
					MethodDefinition: true,
				},
				contexts: [
					"ArrowFunctionExpression",
					"ClassDeclaration",
					"ClassExpression",
					"ClassProperty",
					"FunctionDeclaration",
					"FunctionExpression",
					"MethodDefinition",
					"TSMethodSignature",
				],
			},
		],
		"jsdoc/require-param-type": "off",
		"jsdoc/require-returns-type": "off",
		"jsdoc/require-returns": "off",
		"jsdoc/require-param": ["error", { checkRestProperty: false }],
		"jsdoc/check-param-names": [
			"error" | "warn",
			{
				checkRestProperty: false,
			},
		],
		"jsdoc/no-types": "error",
	}
}
const expectParam =
<Param>() =>
(param: Param) =>
param;
export default expectParam;
const isBasicError = (val: any): val is Record<"message", string> =>
typeof val === "object" &&
"message" in val &&
typeof val?.message === "string";
export const extractErrorMessage = (errorObject: unknown): string => {
// If the error object is a standard error, return the message
if (isBasicError(errorObject)) return errorObject.message;
// If the error object is a string, return it
if (typeof errorObject === "string") return errorObject;
// Last resort, if we are unable to reasonably derive
// an error message from the object, we just stringify
// the whole thing and return that
return JSON.stringify(errorObject);
};
/**
* Finds the element in the array that gets the highest
* score. Function to derive score is provided.
*/
const findBest = <Element>(
arr: Element[],
deriveScore: (p: Element) => number,
): Element | undefined =>
arr.reduce((bestScoringElement, currentElement) => {
const currentScore = deriveScore(currentElement);
const bestScore = deriveScore(bestScoringElement);
return currentScore > bestScore ? currentElement : bestScoringElement;
});
export default findBest;
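// Example usage (illustrative): find the longest word.
findBest(["hi", "hello", "hey"], (word) => word.length); // => "hello"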
/**
* A "safe" version of the array "find" method that
* throws an error if no value is found. This means
* that the return value of the function is guaranteed
* and does not need to be checked for null or
* undefined.
*
* @param arr The array to search
* @param predicate The function used to find the
* value
* @param [errorMessage] The message to include in the
* error that is thrown if no value is found. Defaults
* to "not found"
* @returns The first value in the array that returns
* `true` when passed to the predicate function. If
* no item in the array returns `true` from the
* predicate, an error is thrown.
*/
const findOrThrow = <T>(
arr: T[],
predicate: (p: T) => boolean,
errorMessage = "not found",
): T => {
const result = arr.find(predicate);
if (result === undefined) throw Error(errorMessage);
return result;
};
export const _findOrThrow =
<T>(predicate: (p: T) => boolean, errorMessage: string = "not found") =>
(arr: T[]) =>
findOrThrow(arr, predicate, errorMessage);
export default findOrThrow;
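// Example usage (illustrative):
const exampleUsers = [{ id: "a" }, { id: "b" }];
findOrThrow(exampleUsers, (user) => user.id === "b"); // => { id: "b" }
// Curried form, e.g. for use in a pipe:
_findOrThrow((user: { id: string }) => user.id === "c", "user not found")(
  exampleUsers,
); // throws Error("user not found")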
/**
* USAGE: import this file at the top of your app's root. From then on any calls to
* `JSON.parse` that are not passed a custom reviver will fix any dates that were
* stringified by JSON.stringify.
*/
const isoDatePattern = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[.Z:+\-\d]*?$/;
const dotnetDatePattern = /^\/Date\((\d+)([+\-\d]*)\)\/$/;
const baseJSONParse = JSON.parse;
JSON.parse = (text, customReviver) =>
baseJSONParse(text, (key, value) => {
if (customReviver) return customReviver(key, value);
if (typeof value === "string" && isoDatePattern.test(value))
return new Date(value);
const dotnetResult = dotnetDatePattern.exec(value);
if (dotnetResult)
return new Date(
dotnetResult.slice(1, 3).reduce((a, x) => a + (parseInt(x) || 0), 0),
);
return value;
});
if (typeof window !== "undefined" && window.Response)
Response.prototype.json = async function () {
return JSON.parse(await this.text());
};
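// Example (illustrative): after importing this file once, stringified dates
// are revived automatically by the patched `JSON.parse`.
const revived = JSON.parse(JSON.stringify({ createdAt: new Date() }));
revived.createdAt instanceof Date; // => true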
/**
* Take an array of strings and join them so that they can
* be used in a `font-family` css style declaration.
*
* The idea is to provide a more declarative and composable
* way to apply font family styles.
*/
const fontFamilyRule = (families: string[]): string => {
const formatted = families.map((familyName) =>
familyName.includes(" ") ? `"${familyName}"` : familyName,
);
const joined = formatted.join(", ");
return joined;
};
export default fontFamilyRule;
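// Example usage (illustrative): multi-word family names get quoted.
fontFamilyRule(["Open Sans", "Helvetica", "sans-serif"]);
// => '"Open Sans", Helvetica, sans-serif'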
const getMaxIndex = (arr: unknown[]) => arr.length - 1;
const forEachBackAndForth = <Element>(
arr: Element[],
iterator: (value: Element, index: number, array: Element[]) => void,
): void => {
if (arr.length === 0) return;
const iterations = getMaxIndex(arr) * 2;
for (let i = 0; i <= iterations; i += 1) {
const index = i > getMaxIndex(arr) ? iterations - i : i;
iterator(arr[index], index, arr);
}
};
export default forEachBackAndForth;
const generateArray = <Element>(
length: number,
generator: (i: number) => Element,
): Element[] => [...Array(length)].map((_, index) => generator(index));
export default generateArray;
import { F } from "@mobily/ts-belt";
export type ArrayDifferenceType = "added" | "removed" | "updated";
export type ArrayDifference<T> = {
type: ArrayDifferenceType;
data: T;
};
type BatchedArrayDifferences<T> = {
type: ArrayDifferenceType;
data: T[];
};
/**
* @param baseArr
* @param newArr
* @param idDeriver
*/
export const getArrayDifference = <T>(
baseArr: T[],
newArr: T[],
idDeriver: (item: T) => string | number
) => {
const addedItems: BatchedArrayDifferences<T> = {
type: "added",
data: newArr.filter(
(item) => !baseArr.some((i) => idDeriver(i) === idDeriver(item))
),
};
const removedItems: BatchedArrayDifferences<T> = {
type: "removed",
data: baseArr.filter(
(item) => !newArr.some((i) => idDeriver(i) === idDeriver(item))
),
};
const changedItems: BatchedArrayDifferences<T> = {
type: "updated",
data: newArr.filter((item) => {
const equivalentFromBase = baseArr.find(
(baseItem) => idDeriver(baseItem) === idDeriver(item)
);
if (!equivalentFromBase) return false;
return !F.equals(equivalentFromBase, item);
}),
};
const differences: ArrayDifference<T>[] = [
addedItems,
removedItems,
changedItems,
]
.map((differenceSet) =>
differenceSet.data.map((differenceEntity) => ({
type: differenceSet.type,
data: differenceEntity,
}))
)
.flat();
return differences;
};
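// Example usage (illustrative): diff two lists of items by id.
getArrayDifference(
  [{ id: 1, name: "a" }, { id: 2, name: "b" }],
  [{ id: 2, name: "b2" }, { id: 3, name: "c" }],
  (item) => item.id
);
// => [
//   { type: "added", data: { id: 3, name: "c" } },
//   { type: "removed", data: { id: 1, name: "a" } },
//   { type: "updated", data: { id: 2, name: "b2" } },
// ]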
export type GetStringDiffScoreOptions = {
caseSensitive: boolean;
};
// Uses levenshtein distance to calculate a numerical
// score indicating how different 2 strings are
//
// CREDIT: This code was largely generated by Copilot,
// although I did clean it up a bit
const getStringDiffScore = (
a: string,
b: string,
{ caseSensitive = true }: Partial<GetStringDiffScoreOptions> = {},
): number => {
const finalA = caseSensitive ? a : a.toLowerCase();
const finalB = caseSensitive ? b : b.toLowerCase();
// If one of strings is empty, the difference score
// is just the length of the other string
if (!finalA.length) return finalB.length;
if (!finalB.length) return finalA.length;
const matrix: number[][] = [];
// increment along the first column of each row
for (let row = 0; row <= finalB.length; row += 1) {
matrix[row] = [row];
}
// increment each column in the first row
for (let col = 0; col <= finalA.length; col += 1) {
matrix[0][col] = col;
}
// Fill in the rest of the matrix
for (let row = 1; row <= finalB.length; row += 1) {
for (let col = 1; col <= finalA.length; col += 1) {
if (finalB.charAt(row - 1) === finalA.charAt(col - 1)) {
matrix[row][col] = matrix[row - 1][col - 1];
} else {
matrix[row][col] = Math.min(
matrix[row - 1][col - 1] + 1, // substitution
Math.min(
matrix[row][col - 1] + 1, // insertion
matrix[row - 1][col] + 1,
),
); // deletion
}
}
}
return matrix[finalB.length][finalA.length];
};
export default getStringDiffScore;
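// Example usage (illustrative): lower score = more similar.
getStringDiffScore("kitten", "sitting"); // => 3
getStringDiffScore("Hello", "hello", { caseSensitive: false }); // => 0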
const hasCollision = <T>(arr: T[], collisionChecker: (a: T, b: T) => boolean) =>
arr.some((el, index, baseArr) => {
const itemsToCompare = baseArr.slice(index + 1);
return itemsToCompare.some((otherEl) => collisionChecker(el, otherEl));
});
export default hasCollision;

Add the following to your package.json scripts:

{
	"check-types": "tsc --noEmit"
}
/**
* Check if a key exists in an object. This works
* identically to the "in" keyword built in to JS,
* but also works as a typeguard to provide type
* information to typescript, which the "in" keyword
* does not do (at least in the current version of
* typescript, 4.6.4)
*
* @param key - The key to check for
* @param obj - The object within which to look for
* the key
* @returns Whether or not the key exists in 'obj'.
* In typescript, if the function returns true, then
* 'obj' is typed as `Record<Key, unknown>`, with
* `Key` being the key that was checked for.
*/
const keyIsInObject = <Key extends string>(
key: Key,
obj: any,
): obj is Record<Key, unknown> => key in obj;
export default keyIsInObject;
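// Example usage (illustrative):
const parsedJson: unknown = JSON.parse('{"name":"Ada"}');
if (
  typeof parsedJson === "object" &&
  parsedJson !== null &&
  keyIsInObject("name", parsedJson)
) {
  // `parsedJson` is now typed as Record<"name", unknown>
  console.log(parsedJson.name);
}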
type ConsoleLevel = 'warning' | 'error' | 'info' | 'debug';
type PendingConsoleLog = {
params: any[];
level: ConsoleLevel;
firedAt: Date;
};
type CustomConsoleConstructorOptions = {
defaultLevel: ConsoleLevel;
labels: string[];
collectingTimeout: number;
};
type CustomConsoleExtensionNewLabelHandlingMode = 'append' | 'prepend' | 'override';
type CustomConsoleExtensionOptions = CustomConsoleConstructorOptions & {
handleNewLabels: CustomConsoleExtensionNewLabelHandlingMode;
modifyLabels: (labels: string[]) => string[];
};
const defaultCustomConsoleOptions: CustomConsoleConstructorOptions = {
labels: [],
defaultLevel: 'info',
collectingTimeout: 3000
};
const defaultExtensionOptions: Omit<
CustomConsoleExtensionOptions,
keyof CustomConsoleConstructorOptions
> = {
handleNewLabels: 'append',
modifyLabels: (labels) => labels
};
const composeLabelPrefix = (labels: string[]) => {
const combinedLabels = labels.join(', ');
const prefix = labels.length ? `(${combinedLabels})` : '';
return prefix;
};
const composeLogParams = (labels: string[], messages: any[]): [string, ...any[]] => {
const prefix = composeLabelPrefix(labels);
return [`${prefix}`, ...messages];
};
class CustomConsole {
labels: string[];
defaultLevel: ConsoleLevel;
private pendingLogs: PendingConsoleLog[] = [];
collectingTimeoutDuration: number;
private _isCollecting = false;
private groupLabel: string | undefined = undefined;
disabled = false;
private collectingTimeout: ReturnType<typeof setTimeout> | undefined = undefined;
constructor(userOptions: Partial<CustomConsoleConstructorOptions> = {}) {
const options: CustomConsoleConstructorOptions = {
...defaultCustomConsoleOptions,
...userOptions
};
this.labels = options.labels;
this.defaultLevel = options.defaultLevel;
this.collectingTimeoutDuration = options.collectingTimeout;
}
private composeNewLabels(
newLabels: string[],
mode: CustomConsoleExtensionNewLabelHandlingMode
): string[] {
switch (mode) {
case 'prepend':
return [...newLabels, ...this.labels];
case 'override':
return newLabels;
default:
// 'append'
return [...this.labels, ...newLabels];
}
}
get isCollecting() {
return this._isCollecting;
}
extend(extraOptions: Partial<CustomConsoleExtensionOptions> = {}): CustomConsole {
const currentOptions: CustomConsoleConstructorOptions = {
defaultLevel: this.defaultLevel,
labels: this.labels,
collectingTimeout: this.collectingTimeoutDuration
};
const extensionOptions: CustomConsoleExtensionOptions = {
...currentOptions,
...defaultExtensionOptions,
...extraOptions
};
const withExtraLabels = this.composeNewLabels(
extensionOptions.labels,
extensionOptions.handleNewLabels
);
const newLabels = extensionOptions.modifyLabels(withExtraLabels);
const newConsole = new CustomConsole({
labels: newLabels,
defaultLevel: extensionOptions.defaultLevel,
collectingTimeout: extensionOptions.collectingTimeout
});
return newConsole;
}
private clearCollectingTimeout() {
if (this.collectingTimeout) {
clearTimeout(this.collectingTimeout);
}
}
private startCollectingTimeout() {
this.collectingTimeout = setTimeout(
() => this.stopCollecting(),
this.collectingTimeoutDuration
);
}
private resetCollectingTimeoutCountdown() {
this.clearCollectingTimeout();
this.startCollectingTimeout();
}
logToLevel(level: ConsoleLevel, ...logParams: any[]) {
if (this.disabled) return undefined;
const finalLogParams = composeLogParams(this.labels, logParams);
if (!this._isCollecting) {
switch (level) {
case 'warning':
console.warn(...finalLogParams);
break;
case 'error':
console.error(...finalLogParams);
break;
case 'info':
console.info(...finalLogParams);
break;
case 'debug':
console.debug(...finalLogParams);
break;
default:
console.log(...finalLogParams);
break;
}
} else {
// If it is collecting...
this.pendingLogs.push({
level,
params: logParams,
firedAt: new Date()
});
this.resetCollectingTimeoutCountdown();
}
}
executePendingLogs() {
const prefix = composeLabelPrefix(this.labels);
const prefixes = [prefix, this.groupLabel].filter(
(label) => typeof label === 'string'
) as string[];
console.group(prefixes.join(' '));
this.pendingLogs.forEach(({ level, params }) => {
this.logToLevel(level, ...params);
});
console.groupEnd();
}
startCollecting(groupLabel?: string, timeoutDuration = this.collectingTimeoutDuration) {
this.collectingTimeoutDuration = timeoutDuration;
this._isCollecting = true;
this.groupLabel = groupLabel;
}
stopCollecting() {
this._isCollecting = false;
this.executePendingLogs();
this.pendingLogs = [];
}
log(...params: any[]) {
this.logToLevel(this.defaultLevel, ...params);
}
info(...params: any[]) {
this.logToLevel('info', ...params);
}
debug(...params: any[]) {
this.logToLevel('debug', ...params);
}
warning(...params: any[]) {
this.logToLevel('warning', ...params);
}
error(...params: any[]) {
this.logToLevel('error', ...params);
}
}
export default CustomConsole;
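// Example usage (illustrative): a labelled console plus a scoped child console.
const appConsole = new CustomConsole({ labels: ['app'] });
appConsole.info('booted'); // logs "(app) booted"
const authConsole = appConsole.extend({ labels: ['auth'] });
authConsole.warning('token expiring'); // logs "(app, auth) token expiring"
// Collect related logs and flush them as a single console group:
authConsole.startCollecting('sign-in flow');
authConsole.debug('step 1');
authConsole.debug('step 2');
authConsole.stopCollecting();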
/* ---------------------------------- */
/* Types */
/* ---------------------------------- */
/* ---------- Utility Types --------- */
type AnyDict = Record<string, any>;
type ValueOf<T extends AnyDict | any[]> = T[keyof T];
// This should probably be moved into a `utilityTypes`
// file
type DeepPartial<T> = T extends AnyDict
? {
[P in keyof T]?: DeepPartial<T[P]>;
}
: T;
// This should probably be moved into a `utilityTypes`
// file
/* ----------- Merge Types ---------- */
type ShallowMergeRecords<A extends AnyDict, B extends AnyDict> = B &
Omit<A, keyof B>;
type ShallowMerge<A, B> = A extends AnyDict
? B extends AnyDict
? ShallowMergeRecords<A, B>
: B
: B;
// Take the fields from two object types that are not shared
// between them
type ShallowDiff<A extends AnyDict, B extends AnyDict> = Omit<A, keyof B> &
Omit<B, keyof A>;
// Take the fields from two object types that are shared
// between them
export type ShallowOverlap<T extends AnyDict, U extends AnyDict> = Omit<
T | U,
keyof ShallowDiff<T, U>
>;
export type DeepMerge<T extends AnyDict, U extends AnyDict> = ShallowDiff<
T,
U
> & {
[OverlappingKey in keyof ShallowOverlap<T, U>]: [
T[OverlappingKey],
U[OverlappingKey]
] extends [AnyDict, AnyDict]
? ValueOf<T[OverlappingKey]> | ValueOf<U[OverlappingKey]> extends AnyDict
? // If either of the objects contain nested objects, perform a deep merge
DeepMerge<T[OverlappingKey], U[OverlappingKey]>
: // If neither of the objects have nested objects, do a shallow merge
ShallowMerge<T[OverlappingKey], U[OverlappingKey]>
: U;
};
/* ---------------------------------- */
/* Main Code */
/* ---------------------------------- */
/* -------- Utility Functions ------- */
const isObjectLike = (item: any) =>
typeof item === 'object' && item !== null && !(item instanceof Date);
const isPlainObject = (item: any): item is Record<string, any> =>
isObjectLike(item) && !Array.isArray(item);
const isDate = (item: unknown): item is Date => item instanceof Date;
const copyDate = (date: Date): Date => new Date(date.getTime());
// Creates a copy of an object by recreating all
// values within it, must be done to avoid mutating
// during the merge
const deepCopy = <T>(item: T): T => {
// If the object is primitive we just give it back
if (isDate(item)) {
return copyDate(item) as any as T;
}
if (!isObjectLike(item)) {
return item;
}
const result: AnyDict = {};
Object.entries(item as AnyDict).forEach(([key, value]) => {
if (isPlainObject(value)) {
result[key] = deepCopy(value);
} else {
result[key] = value;
}
});
return result as T;
};
/* ----------- Merge Code ----------- */
// NOTE: If your project already includes a merge
// function (eg; lodash's merge) you can use that
// as the untyped merge and adapt the `deepMerge`
// function to use that.
// Performs the actual merge, the 'deepMerge' function
// just runs this function and injects correct typing
const untypedMerge = (source: AnyDict, update: AnyDict): AnyDict => {
// Use the source as the starting point for the result
// We run source through `deepCopy` to prevent mutating it
const result: AnyDict = deepCopy(source);
Object.entries(update).forEach(([key, valueFromUpdate]) => {
const valueFromSource = source[key];
const bothValuesArePlainObjects = [valueFromSource, valueFromUpdate].every(
isPlainObject
);
if (bothValuesArePlainObjects) {
// Merge the values if they are both objects
result[key] = untypedMerge(valueFromSource, valueFromUpdate);
} else {
// If only 1 or neither of them are objects, replace
// with value from update
result[key] = valueFromUpdate;
}
});
return result;
};
// We have to use the `function` keyword because otherwise TS gives
// an error about having to infer the type of the variable;
// declaring it with the `function` keyword gets around that.
function deepMerge<Source extends AnyDict, Update extends AnyDict>(
source: Source,
update: Update
): DeepMerge<Source, Update> {
return untypedMerge(source, update) as DeepMerge<Source, Update>;
}
// This functions identically to the `deepMerge` function,
// but with the type of the second parameter being a deep
// partial version of the first, and the return type
// being the type of the first param. This is useful for
// providing an 'update' of an object without changing its
// type, eg; in a redux reducer.
export function deepUpdate<Source extends AnyDict>(
source: Source,
update: DeepPartial<Source>
) {
return deepMerge(source, update) as any as Source;
}
// NOTE: When importing this into a project, try out using
// an arrow function for the `deepMerge` functions. The
// `function` keyword is used here because that was the
// only way to get it to work when this was initially written
// in a `tsdx` template project.
export default deepMerge;
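// Example usage (illustrative):
const defaultSettings = { theme: { color: 'red', size: 12 }, debug: false };
deepMerge(defaultSettings, { theme: { color: 'blue' } });
// => { theme: { color: 'blue', size: 12 }, debug: false }
deepUpdate(defaultSettings, { debug: true });
// => { theme: { color: 'red', size: 12 }, debug: true }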
import { nanoid as generateId } from "nanoid";
type SubscriberCallback<EventPayload> = (
payload: EventPayload,
eventId: string,
) => void | Promise<void>;
export type Subscriber<EventPayload> = {
callback: SubscriberCallback<EventPayload>;
id: string;
};
const composeSubscriber = <EventPayload>(
callback: SubscriberCallback<EventPayload>,
): Subscriber<EventPayload> => ({
callback,
id: generateId(),
});
class Emitter<EventPayload> {
private subscribers: Subscriber<EventPayload>[] = [];
constructor() {
this.subscribers = [];
}
private unsubscribe = (id: string): void => {
this.subscribers = this.subscribers.filter(
(subscriber) => subscriber.id !== id,
);
};
subscribe = (callback: SubscriberCallback<EventPayload>): (() => void) => {
const newSubscriber = composeSubscriber(callback);
this.subscribers.push(newSubscriber);
return () => this.unsubscribe(newSubscriber.id);
};
emit = (payload: EventPayload, eventId = generateId()): void => {
this.subscribers.forEach(({ callback }) => callback(payload, eventId));
};
}
export default Emitter;
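// Example usage (illustrative):
type UserLoggedInPayload = { userId: string };
const userLoggedInEmitter = new Emitter<UserLoggedInPayload>();
const unsubscribe = userLoggedInEmitter.subscribe((payload, eventId) => {
  console.log(`user ${payload.userId} logged in (event ${eventId})`);
});
userLoggedInEmitter.emit({ userId: "abc123" });
unsubscribe();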
type WithId = {
id: string;
};
type Array<T> = T[] | readonly T[];
/* ----- Filter Out Item With ID ---- */
export const filterOutItemWithId = <T extends WithId>(
arr: Array<T>,
id: string,
): Array<T> => arr.filter((item) => item.id !== id);
export const _filterOutItemWithId =
<T extends WithId>(id: string) =>
(arr: Array<T>): Array<T> =>
filterOutItemWithId(arr, id);
/* ------- Update Item With Id ------ */
export const updateItemWithId = <T extends WithId>(
arr: Array<T>,
id: string,
update: (p: T) => T,
): Array<T> =>
arr.map((item) => {
if (item.id !== id) {
return item;
}
return update(item);
});
export const _updateItemWithId =
<T extends WithId>(id: string, update: (p: T) => T) =>
(arr: Array<T>): Array<T> =>
updateItemWithId(arr, id, update);
/* -------- Find Item With Id ------- */
export const findItemWithId = <T extends WithId>(
arr: Array<T>,
id: string,
): T | undefined => arr.find((item) => item.id === id);
export const _findItemWithId =
<T extends WithId>(id: string) =>
(arr: Array<T>): T | undefined =>
findItemWithId(arr, id);
/* --- Find Essential Item With Id -- */
export const findEssentialItemWithId = <T extends WithId>(
arr: Array<T>,
id: string,
): T => {
const item = findItemWithId(arr, id);
if (!item) {
throw new Error(`Could not find item with id ${id}`);
}
return item;
}
export const _findEssentialItemWithId =
<T extends WithId>(id: string) =>
(arr: Array<T>): T =>
findEssentialItemWithId(arr, id);
/* -- Sort items by reference array - */
export const sortItemsByReferenceArray = <T extends WithId>(
items: Array<T>,
referenceArray: Array<string>,
): Array<T> =>
[...items].sort(
(a, b) => referenceArray.indexOf(a.id) - referenceArray.indexOf(b.id),
);
export const _sortItemsByReferenceArray =
<T extends WithId>(referenceArray: Array<string>) =>
(items: Array<T>): Array<T> =>
sortItemsByReferenceArray(items, referenceArray);
import { useReducer } from "react";
// This import is only used for the optional react-hook
// integration (go to the bottom of the file to see it)
// `B` prefix denotes a `branded` type
type BNoPayload = {
JgJES6BF8uyaOwF1: "FY7eBhPYJlqOxuVp";
};
type BOptionalPayload = {
A7nWdXs0r5RLuHRf: "zPcrRNRIl4r5IHbA";
};
// Wrap your payload type in this if you want it to be optional
export type OptionalPayload<Payload> = Payload & BOptionalPayload;
type ExtractOptionalPayloadType<PossiblyBrandedPayload> =
PossiblyBrandedPayload extends OptionalPayload<infer UnbrandedPayload>
? UnbrandedPayload
: PossiblyBrandedPayload; // We know this isn't branded if this conditional is true
// If only type is provided, there will be no payload field.
// Wrap payload with `OptionalPayload` generic type to make
// it optional
export type Action<
Type extends string,
Payload = BNoPayload,
> = Payload extends BNoPayload
? {
type: Type;
}
: Payload extends BOptionalPayload
? {
type: Type;
payload?: ExtractOptionalPayloadType<Payload>;
}
: Required<{
type: Type;
payload: ExtractOptionalPayloadType<Payload>;
}>;
export type ReducerObject<State, StateAction extends Action<string, any>> = {
[TypeName in StateAction["type"]]: (
state: State,
action: Extract<StateAction, { type: TypeName }>,
) => State;
};
export const createReducer =
<State, StateAction extends Action<string, any>>(
reducerObject: ReducerObject<State, StateAction>,
) =>
<ActionType extends StateAction["type"]>(
state: State,
action: Extract<StateAction, { type: ActionType }>,
): State => {
const actionReducer = reducerObject[action.type];
return actionReducer(state, action);
};
// OPTIONAL: Custom version of React's `useReducer` that uses an
// object reducer
export const useStateReducer = <State, StateAction extends Action<any, any>>(
initialState: State,
reducerObject: ReducerObject<State, StateAction>,
) => useReducer(createReducer(reducerObject), initialState);
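// Example usage (illustrative): a small counter reducer.
type CounterAction = Action<"increment"> | Action<"add", number>;
const counterReducer = createReducer<number, CounterAction>({
  increment: (state) => state + 1,
  add: (state, action) => state + action.payload,
});
counterReducer(1, { type: "add", payload: 2 }); // => 3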
/**
* `PathOf` is a type that produces a string union
* representing 'paths' to a record's fields.
*/
/* eslint-disable @typescript-eslint/naming-convention */
type INTERNAL_ConditionalUnion<
Base,
Incoming,
Condition extends boolean
> = Condition extends true ? Base | Incoming : Base;
type INTERNAL_StringKeyOf<Obj> = keyof Obj & string;
type INTERNAL_DeepOmitOptional<Obj extends Record<any, unknown>> = {
[Key in keyof Obj as Obj[Key] extends Required<Obj>[Key]
? Key
: never]: Obj[Key] extends Record<any, unknown>
? INTERNAL_DeepOmitOptional<Obj[Key]>
: Obj[Key];
};
// We combine the previous path + the separator + the new key
// if the previous path is not an empty string. If the previous
// path is an empty string, just return the new key.
type INTERNAL_NextPath<
Previous extends string,
Separator extends string,
NewKey extends string
> = Previous extends '' ? NewKey : `${Previous}${Separator}${NewKey}`;
// The main functionality of `PathOf`, allows for certain
// aspects of the functionality to be customised via options
// passed as generics. For better DX, we will instead export
// different types that are just versions of this type with
// specific options applied. This approach also allows us to
// make sure the `Previous` key cannot be accessed as it is
// only intended to be provided internally during recursion
type INTERNAL_PathOf<
Obj extends Record<string, any>,
IncludeNonLeaves extends boolean,
Separator extends string = '.',
Previous extends string = ''
> = {
[Key in INTERNAL_StringKeyOf<Obj>]: Required<Obj>[Key] extends Record<
string,
any
>
? // $ If the value at `Key` is an object, we recurse down to the next level
INTERNAL_ConditionalUnion<
INTERNAL_PathOf<
Required<Obj>[Key],
IncludeNonLeaves,
Separator,
INTERNAL_NextPath<Previous, Separator, Key>
>,
// $ If we are allowing non-leaves, then the key can be either the current
// $ path, or the end result of the recursion. If we are only allowing leaves,
// $ then the path must be the end result of the recursion
INTERNAL_NextPath<Previous, Separator, Key>,
//? If this conditional is met, that means we are both returning the current
//? path as a valid option and continuing to recurse down to look for more
IncludeNonLeaves
>
: // $ If the value at `Key` is a primitive, we return the current path
INTERNAL_NextPath<Previous, Separator, Key>;
//? If this gets hit that means the recursion of the current branch is complete
//? And the current branch is returned as a valid path
}[INTERNAL_StringKeyOf<Obj>];
type PathOf<
Obj extends Record<string, any>,
Separator extends string = '.'
> = INTERNAL_PathOf<Obj, true, Separator>;
export type PathOfLeaf<
Obj extends Record<string, any>,
Separator extends string = '.'
> = INTERNAL_PathOf<Obj, false, Separator>;
export type NonOptionalPathOf<
Obj extends Record<string, any>,
Separator extends string = '.'
> = PathOf<INTERNAL_DeepOmitOptional<Obj>, Separator>;
export type NonOptionalPathOfLeaf<
Obj extends Record<string, any>,
Separator extends string = '.'
> = PathOfLeaf<INTERNAL_DeepOmitOptional<Obj>, Separator>;
export default PathOf;
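// Example usage (illustrative):
type ExampleSettings = {
  profile: { name: string; avatar: { url: string } };
  darkMode: boolean;
};
type SettingsPath = PathOf<ExampleSettings>;
// => 'profile' | 'profile.name' | 'profile.avatar' | 'profile.avatar.url' | 'darkMode'
type SettingsLeafPath = PathOfLeaf<ExampleSettings>;
// => 'profile.name' | 'profile.avatar.url' | 'darkMode'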
/* #region Internal Utility Functions */
const generateArray = <Element>(
length: number,
generator: (i: number) => Element,
): Element[] => [...Array(length)].map((_, index) => generator(index));
const randomInt = (min: number, max: number): number =>
Math.floor(Math.random() * (max - min + 1)) + min;
const capitalizeWord = (word: string): string => {
const [firstChar, ...otherChars] = word.split("");
const combined = [firstChar.toUpperCase(), ...otherChars].join("");
return combined;
};
/* #endregion */
const PLACEHOLDER_TEXT_WORDS = [
"lorem",
"ipsum",
"dolor",
"sit",
"amet",
"consectetur",
"adipiscing",
"elit",
"sed",
"do",
"eiusmod",
"tempor",
"incididunt",
"ut",
"labore",
"et",
"dolore",
"magna",
"aliqua",
"ut",
"enim",
"ad",
"minim",
"veniam",
"quis",
"nostrud",
"exercitation",
"ullamco",
"laboris",
"nisi",
"ut",
"aliquip",
"ex",
"ea",
"commodo",
"consequat",
"duis",
"aute",
"irure",
"dolor",
"in",
"reprehenderit",
"in",
"voluptate",
"velit",
"esse",
"cillum",
"dolore",
"eu",
"fugiat",
"nulla",
"pariatur",
"excepteur",
"sint",
"occaecat",
"cupidatat",
"non",
"proident",
"sunt",
"in",
"culpa",
"qui",
"officia",
"deserunt",
"mollit",
"anim",
"id",
"est",
"laborum",
];
export const generatePlaceholderWord = (): string => {
const index = randomInt(0, PLACEHOLDER_TEXT_WORDS.length - 1);
return PLACEHOLDER_TEXT_WORDS[index];
};
export const generatePlaceholderWords = (amount: number): string => {
const array = generateArray(amount, generatePlaceholderWord);
return array.join(" ");
};
export const generatePlaceholderSentence = (): string => {
const numberOfWords = randomInt(6, 9);
const firstWord = generatePlaceholderWord();
const tailWords = generatePlaceholderWords(numberOfWords - 1);
const words = `${capitalizeWord(firstWord)} ${tailWords}`;
return `${words}.`;
};
export const generatePlaceholderParagraph = (): string => {
const numberOfSentences = randomInt(4, 8);
const sentences = generateArray(
numberOfSentences,
generatePlaceholderSentence,
);
return sentences.join(" ");
};

Libs

This libs folder is for more complex gists that are effectively like mini libraries.

/**
* The idea behind a "storage atom" is to invert the
* way we use the JS storage APIs. Rather than using
* the base storage controller and accessing specific
* elements every time we use it, we instead create a
* new controller (the "atom") which then interfaces
* with the base API for us.
* */
const extractErrorMessage = (err: any): string => {
if (typeof err === "string" || typeof err === "number") {
return `${err}`;
}
const includedMessage = err?.message;
if (includedMessage) {
if (
typeof includedMessage === "string" ||
typeof includedMessage === "number"
) {
return `${includedMessage}`;
}
return JSON.stringify(includedMessage);
}
return JSON.stringify(err);
};
export type StorageType = "local" | "session";
export const getControllerForStorageMode = (mode: StorageType) =>
mode === "local" ? localStorage : sessionStorage;
export type StorageAtom<Value> = {
set: (p: Value) => void;
get: () => Value;
remove: () => void;
reset: () => Value;
defaultValue: Value;
_meta: {
initializedAt: Date;
type: StorageType;
key: string;
};
};
// eslint-disable-next-line @typescript-eslint/no-empty-function
const stubFn = () => {};
const createDummyStorageAtom = <Value>(
type: StorageType,
key: string,
defaultValue: Value,
): StorageAtom<Value> => ({
remove: stubFn,
get: () => defaultValue,
reset: () => defaultValue,
set: stubFn,
defaultValue,
_meta: {
type,
initializedAt: new Date(),
key,
},
});
export type Stringifier<T> = (p: T) => string;
export type Parser<T> = (p: string) => T;
export type SerializationController<T> = {
stringify: Stringifier<T>;
parse: Parser<T>;
};
const getDefaultSerializationController = <
T,
>(): SerializationController<T> => ({
stringify: JSON.stringify,
parse: JSON.parse,
});
const createStorageAtom = <Value>(
type: StorageType,
key: string,
defaultValue: Value,
customSerializer: Partial<SerializationController<Value>> = {},
): StorageAtom<Value> => {
try {
const { stringify, parse } = {
...getDefaultSerializationController<Value>(),
...customSerializer,
};
const storageController = getControllerForStorageMode(type);
const get = (): Value => {
const rawValue = storageController.getItem(key);
if (!rawValue) {
return defaultValue;
}
try {
const parsed = parse(rawValue) as Value;
return parsed;
} catch (e) {
throw Error(
`An error occurred while trying to parse the value stored with key "${key}": ${extractErrorMessage(
e,
)}`,
);
}
};
const set = (newValue: Value) => {
const stringified = stringify(newValue);
storageController.setItem(key, stringified);
};
const remove = () => {
storageController.removeItem(key);
};
const reset = () => {
const stringified = stringify(defaultValue);
storageController.setItem(key, stringified);
return defaultValue;
};
return {
get,
set,
defaultValue,
remove,
reset,
_meta: {
initializedAt: new Date(),
key,
type,
},
};
} catch (e) {
return createDummyStorageAtom(type, key, defaultValue);
}
};
export default createStorageAtom;
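// Example usage (illustrative): a localStorage-backed atom for a theme setting.
type Theme = "light" | "dark";
const themeAtom = createStorageAtom<Theme>("local", "app-theme", "light");
themeAtom.get(); // => "light" until a value has been stored
themeAtom.set("dark");
themeAtom.get(); // => "dark"
themeAtom.reset(); // => "light", and the stored value is reset too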
// NOTE: For maximum functionality, replace usages of `JSON.parse`
// and `JSON.stringify` with the `superjson` package.
export type StorageController<Data, HasInitialValue extends boolean = false> = {
get: () => Data | (HasInitialValue extends true ? Data : undefined);
set: (data: Data) => Data;
} & (HasInitialValue extends true
? {
reset: () => Data;
}
: {
clear: () => undefined;
});
export type StorageControllerWithInitialValue<Data> = StorageController<
Data,
true
>;
export type StorageControllerWithoutInitialValue<Data> = StorageController<
Data,
false
>;
export function createStorageController<Data>( // No initial value provided
storageController: Storage,
key: string
): StorageControllerWithoutInitialValue<Data>;
export function createStorageController<Data>( // Initial value provided
storageController: Storage,
key: string,
initialData: Data
): StorageControllerWithInitialValue<Data>;
export function createStorageController<Data>( // Implementation
storageController: Storage,
key: string,
initialData?: Data
) {
const get = () => {
const data = storageController.getItem(key);
return data === null ? undefined : JSON.parse(data);
};
const set = (data: Data) => {
storageController.setItem(key, JSON.stringify(data));
return get();
};
const clear = () => {
if (initialData)
throw new Error('Cannot clear storage controller with initial value');
storageController.removeItem(key);
return undefined;
};
const reset = () => {
if (!initialData)
throw new Error('Cannot reset storage controller without initial value');
return set(initialData);
};
if (initialData && get() === undefined) {
// Set with initial value if one exists and
// get() returns undefined
set(initialData);
}
return {
get,
set,
clear,
reset,
};
}
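// Example usage (illustrative):
const tokenStore = createStorageController<string>(sessionStorage, 'auth-token');
tokenStore.set('abc');
tokenStore.get(); // => 'abc'
tokenStore.clear();
const themeStore = createStorageController(localStorage, 'theme', { darkMode: false });
themeStore.get(); // => { darkMode: false } (seeded from the initial value)
themeStore.set({ darkMode: true });
themeStore.reset(); // => { darkMode: false }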
import { LiteralToPrimitive } from "type-fest";
import { z } from "zod";
type IsLiteral<PossiblyLiteral> =
LiteralToPrimitive<PossiblyLiteral> extends PossiblyLiteral ? false : true;
// Primitives do not extend descended literals, so if the primitive
// version of a type extends its "literal" version, then its "literal"
// version must have actually been primitive
type RawStringToZod<Str extends string> = IsLiteral<Str> extends true
? z.ZodLiteral<Str>
: z.ZodString;
type RawObjectToZod<Obj extends Record<string, unknown>> = z.ZodObject<{
[Key in keyof Obj]: RawToZod<Obj[Key]>;
}>;
/**
* Take a base typescript type and convert it to the corresponding
* zod schema type
* NOTE: This will not work for union types eg; `string | number`
*/
export type RawToZod<T> = z.ZodType<never> &
(T extends number
? z.ZodNumber
: T extends string
? RawStringToZod<T>
: T extends Record<string, unknown>
? RawObjectToZod<T>
: T extends (infer El)[]
? z.ZodArray<RawToZod<El>>
: T extends Date
? z.ZodDate
: T extends boolean
? z.ZodBoolean
: T extends undefined
? z.ZodUndefined
: T extends null
? z.ZodNull
: z.ZodTypeAny);
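// Example usage (illustrative):
type User = { name: string; age: number; tags: string[] };
type UserSchema = RawToZod<User>;
// Resolves (roughly) to z.ZodObject<{
//   name: z.ZodString; age: z.ZodNumber; tags: z.ZodArray<z.ZodString>;
// }>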
import { D } from "@mobily/ts-belt";
import cuid from "cuid";
import produce, { Draft } from "immer";
import create from "zustand";
import { combine } from "zustand/middleware";
type RemoveFirstParam<
Fn extends (...args: [any, ...any[]]) => any
> = Fn extends (...args: [any, ...infer RemainingParams]) => infer ReturnType
? (...args: RemainingParams) => ReturnType
: never;
type StoreActionDefinition<State, ExtraParams extends any[]> = (
draftState: Draft<State>,
...extraParams: ExtraParams
) => void;
type CreatedStoreAction<
ActionDefinition extends StoreActionDefinition<any, any>
> = RemoveFirstParam<ActionDefinition>;
/**
* Provides a layer of abstraction for creating zustand stores
* and actions with high type safety and low boiler plate
*/
class StoreBuilder<
State extends Record<string, any>,
Actions extends Record<string, StoreActionDefinition<State, any[]>> = Record<
never,
never
>
> {
private initialState: State;
private actions: Actions;
/**
* Build the new store
*
* @param initialState the initial state of the store
* @param actions INTERNAL ONLY, DO NOT USE
*/
constructor(
initialState: State,
actions: Actions = {} as any,
) {
// the "actions" and parameter should always default to {} when
// `new StoreBuilder` is called. It should only actually be provided
// inside the `addAction` method
this.initialState = initialState;
this.actions = actions;
}
public addAction<ActionName extends string, Params extends any[]>(
actionName: ActionName,
action: StoreActionDefinition<State, Params>
): StoreBuilder<
State,
Actions & {
[key in ActionName]: StoreActionDefinition<State, Params>;
}
> {
return new StoreBuilder(this.initialState, {
...this.actions,
[actionName]: action,
});
}
public getStore() {
return create(
combine(this.initialState, (set, get) => ({
...(D.map(
this.actions,
(actionFn) => (
...params: Parameters<RemoveFirstParam<typeof actionFn>>
) =>
set((currentState) => {
return produce(currentState, (draftState) =>
actionFn(draftState, ...params)
);
}) // We map over the actions in the store and convert them so that
// they read the state from the store rather than taking it as a parameter
) as {
[Key in keyof Actions]: CreatedStoreAction<Actions[Key]>;
}),
}))
);
}
}
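// Example usage (illustrative): build a simple counter store.
const useCounterStore = new StoreBuilder({ count: 0 })
  .addAction("increment", (draft) => {
    draft.count += 1;
  })
  .addAction("add", (draft, amount: number) => {
    draft.count += amount;
  })
  .getStore();
// In a React component:
// const { count, increment, add } = useCounterStore();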
// Take an object and convert it to a string that can
// be used as URL parameters
const objToUrlParams = <
Params extends Record<string, number | string> = Record<
string,
number | string
>,
>(
obj: Params,
) =>
Object.entries(obj)
.map(([key, value]) => `${key}=${value}`)
.join("&");
export default objToUrlParams;
type PrioritisedState<T> = {
value: T;
isValid: boolean;
};
const stateIsValid = (state: PrioritisedState<unknown>) => state.isValid;
// If you have multiple distinct UI states whose
// conditions can overlap and you only ever want
// to show one of them, this function allows you
// to determine which state to show.
const prioritisedStates = <T>(states: PrioritisedState<T>[], fallback: T) => {
const firstValid = states.find(stateIsValid);
return firstValid?.value ?? fallback;
};
export default prioritisedStates;
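// Example usage (illustrative): pick which banner to show.
prioritisedStates(
  [
    { value: "error", isValid: false },
    { value: "loading", isValid: true },
    { value: "success", isValid: true },
  ],
  "idle",
); // => "loading" (the first valid state wins)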
const removeFromString = (src: string, target: string) =>
src.replace(new RegExp(target, 'g'), "");
export default removeFromString;
/**
* A 'reverse debounce' is like a standard debounce,
* except it fires the callback immediately, and then
* blocks any further calls for a specified amount
* of time.
*
* The use case for this is when you want to fire a
* callback immediately for the best user experience
* while also preventing any duplicate calls.
*
* Can optionally pass a third parameter to set a
* maximum possibly delay between function calls.
*/
const reverseDebounce = <Params extends any[]>(
fn: (...p: Params) => void,
wait: number,
maxPossibleDelay: number = Number.MAX_SAFE_INTEGER
) => {
let lastRunAt = Date.now();
let blockedUntil = Date.now() - 1;
return (...params: Params): void => {
const timeSinceLastExecution = Date.now() - lastRunAt;
const maxDelayHasBeenViolated = timeSinceLastExecution >= maxPossibleDelay;
const blockingWindowHasPassed = Date.now() >= blockedUntil;
const canRun = blockingWindowHasPassed || maxDelayHasBeenViolated;
if (canRun) {
fn(...params);
lastRunAt = Date.now();
}
blockedUntil = Date.now() + wait;
};
};
export default reverseDebounce;
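// Example usage (illustrative): avoid duplicate submissions from double-clicks.
const submitOnce = reverseDebounce((formId: string) => {
  console.log(`submitting ${formId}`);
}, 1000);
submitOnce("signup"); // fires immediately
submitOnce("signup"); // ignored while the blocking window is active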
const roundNumber = (number: number, decimalPoints: number) => {
const factor = Math.max(10 ** decimalPoints, 1);
return Math.round(number * factor) / factor;
};
export default roundNumber;
const roundToInterval = (value: number, interval: number): number =>
Math.round(value / interval) * interval;
export default roundToInterval;
const sleep = (time: number) => {
const date = Date.now();
let currentDate = null;
do {
currentDate = Date.now();
} while (currentDate - date < time);
};
export default sleep;
const toPromise = <T>(value: T): Promise<T> => new Promise((resolve) => resolve(value));
export default toPromise;

TSConfig Setup

Paths

The paths compiler option in tsconfig.json allows you to set up import aliases to organise your imports.

{
	"compilerOptions": {
		"baseUrl": ".",
		"paths": {
			"$/*": ["./src/*"],
			"$*": ["./src/features/*", "./src/lib/*"]
		}
	}
}

This setup defines the following import aliases:

Path                                        Import Alias
src/*folder-name*                           $/*folder-name*
src/features/*feature-name*                 $*feature-name*
src/features/*feature-name*/*folder-name*   $*feature-name*/*folder-name*
src/lib/*lib-name*                          $*lib-name*

Usage of Paths (IMPORTANT)

Depending on what build tools your project uses, you may run into issues with the way they handle your custom paths. This can lead to files being compiled in the wrong order, which causes imports from other files to come through as undefined. To avoid this, your import statements should follow these rules:

  • Resources that are not in a feature folder should be imported from their folder's index file using the $/folderName syntax. Eg; an import of src/utils/generateId.ts should look like this:
import { generateId } from "$/utils";
  • Resources imported from a feature folder into a file that is either not in a feature folder, or is in a different feature folder, should be imported from that feature folder's index. Eg; importing src/features/authentication/utils/signIn.ts into src/components/SignInForm.tsx would look like this:
// src/components/SignInForm.tsx
import { signIn } from "$authentication";
  • Importing a file from a feature folder into another file in the same feature folder should use the path structure of $feature/subFolder. Eg; importing src/features/calendar/utils/getEvents.ts into the file src/features/calendar/components/Calendar.tsx would look like this:
// src/features/calendar/components/Calendar.tsx
import { getEvents } from "$calendar/utils";

Compatibility

Just about every tool that interacts in some way with your code will require additional setup to work correctly with your tsconfig path setups. Tools that will likely require extra setup include:

  • Build tools (webpack, vite, etc.)
  • Test runners (jest); testing packages that do not need to actually interpret your code, such as cypress and playwright, may not need additional setup
  • Linters that analyze imports (eslint-plugin-imports)

The setup required for each of these can be found relatively easily. Setup may require either manually defining import aliases that match your tsconfig, or using a plugin that reads the paths from your tsconfig directly.

Extra Setup for Node

To get paths to work with ts-node you must also do the following steps:

Run this:

yarn add --dev tsconfig-paths

Then add this to your tsconfig:

{
	"ts-node": {
		"require": ["tsconfig-paths/register"]
	}
}

Enforce With Eslint

If you want to enforce the shortest possible paths are used when importing from 'lib' or 'features' you can add this to your eslint config:

{
	rules: {
		'no-restricted-imports': [
      'error',
      {
        patterns: [
          {
            group: ['$/features/**', '$/lib/**'],
            message: 'Use short-form path instead ("$lib-name" or "$feature-name")',
          },
        ],
      },
    ],
	}
}

Running Scripts

Install

First of all install ts-node as a dev dependency (if you don't have/need it as a regular dependency)

yarn add --dev ts-node

Config

In your tsconfig.json, do 2 things:

  • Make sure the include option includes the folder that contains your script files.
  • Add the following to the bottom of the file:
{
	"ts-node": {
		"compilerOptions": {
			"module": "CommonJS"
		}
	}
}

Executing Scripts

Now you can write scripts in typescript and run them using ts-node

yarn ts-node scripts/yourScripts.ts

Type Checking ESLint Config Files

  1. Your eslint file must be a JS file,
  2. Your tsconfig.json must allow js and include .eslintrc.js (or .eslintrc.cjs if your project uses that)
  3. Make sure you have @types/eslint installed as a dev dependency
  4. Add the comment /** @type {import('eslint').Linter.BaseConfig} */ just above the config code

Example Config File:

/** @type {import('eslint').Linter.BaseConfig} */
module.exports = {
	env: {
		browser: true,
		es2021: true,
	},
	extends: ["plugin:react/recommended", "standard-with-typescript"],
	overrides: [],
	parserOptions: {
		ecmaVersion: "latest",
		sourceType: "module",
	},
	plugins: ["react"],
	rules: {},
};
// ## LOW-LEVEL VALUE TYPES
/* These are static types that take
* no arguments and represent
* primitives */
export type PrimitiveValue = string | number | boolean | null;
export type PrintableValue = PrimitiveValue | undefined;
// All the possible values that can be returned
// by the `typeof` keyword
export type TypeLabel =
| "string"
| "number"
| "bigint"
| "boolean"
| "symbol"
| "undefined"
| "object"
| "function";
export type Falsy = false | 0 | "" | 0n | null | undefined;
// We can't include `NaN` here because typescript
// `NaN` is just a number, but it also can't be
// used as a literal the same way other numbers
// can
// ## OBJECT TYPES
export type AnyObject = Record<string, unknown> | unknown[];
export type EmptyObject = Record<never, never> | [];
// ## VALUE COMPOSITION TYPES
/* These types take type arguments and use
them to compose more complex types */
export type Dictionary<T> = Record<string, T>;
export type NonEmptyArray<T> = [T, ...T[]];
/* This is not actually particularly
useful. The idea behind this was to
use it as the type of a function's
parameter, but in an actual codebase,
arrays that we know aren't empty
will probably still be typed with the
standard `[]` typing, so passing it to
a function that expects `NonEmptyArray`
will trigger a typescript warning */
// A value stored in a passed array or object
// type
export type ValueFrom<Obj extends AnyObject> = Obj[keyof Obj];
export type KeyValuePair<Dict extends Dictionary<unknown>> = ValueFrom<{
[Key in keyof Dict]: [Key, Dict[Key]];
// $ This may look weird, but doing it this way means that
// $ typescript will know the type of the value in the tuple
// $ based on the key of the tuple, rather than pairs just being
// $ typed as `[keyof Dict, ValueFrom<Dict>]`
}>;
export type DeepPartial<Dict extends Dictionary<unknown>> = {
[Key in keyof Dict]?: Dict[Key] extends Dictionary<unknown>
? DeepPartial<Dict[Key]>
: Dict[Key];
};
// Alternate versions of `Extract` that enforces overlap between
// the type arguments
export type Narrow<Base extends Sub, Sub> = Extract<Base, Sub>;
// Version of `Omit` where the keys being omitted
// must actually be keys of the base type. This
// should be used instead of `Omit` unless you are
// dealing with dynamic types that may not be keys
// of the base type.
export type StrictOmit<BaseType, ToOmit extends keyof BaseType> = Omit<
BaseType,
ToOmit
>;
// Take an object type and make specific fields
// non-optional
export type RequireSpecificKeys<
Dict extends Dictionary<any>,
Keys extends keyof Dict,
> = Dict & Required<Pick<Dict, Keys>>;
// Take an object type and make specific fields
// optional
export type PartialSpecificKeys<
Dict extends Dictionary<any>,
Keys extends keyof Dict,
> = Omit<Dict, Keys> & Partial<Pick<Dict, Keys>>;
// Removes the `readonly` property from a type
export type Mutable<Obj extends AnyObject> = {
-readonly [P in keyof Obj]: Obj[P];
};
export type ShallowMerge<
Source extends Record<string, any>,
Update extends Record<string, any>,
> = Omit<Source, keyof Update> & Update;
// Replace the type of a specific field(s)
// in an object type
export type ReplaceFieldType<
Dict extends Dictionary<any>,
Key extends keyof Dict,
Replacement,
> = Omit<Dict, Key> & Record<Key, Replacement>;
// Get keys of the base type that are of a specific type
export type ExtractKeyOf<BaseType, KeyExtraction> = Extract<
keyof BaseType,
KeyExtraction
>;
export type StringKeyOf<BaseType> = ExtractKeyOf<BaseType, string>;
export type ExcludeKeyOf<BaseType, KeyExclusion> = Exclude<
keyof BaseType,
KeyExclusion
>;
export type PrefixKeys<
Dict extends Dictionary<unknown>,
Prefix extends string,
> = {
[Key in StringKeyOf<Dict> as `${Prefix}${Key}`]: Dict[Key];
};
export type ConditionalUnion<
Base,
Incoming,
Condition extends boolean,
> = Condition extends true ? Base | Incoming : Base;
// Invert a boolean type
export type Not<Bool extends boolean> = Bool extends true ? false : true;
export type DeepOmitOptional<Obj extends Record<any, unknown>> = {
[Key in keyof Obj as Obj[Key] extends Required<Obj>[Key]
? Key
: never]: Obj[Key] extends Record<any, unknown>
? DeepOmitOptional<Obj[Key]>
: Obj[Key];
};
/**
* Pick some keys from an object type and make those
* fields required, except for some exceptions which
* will simply be picked without being marked as
* required
*/
export type PickAndMakeRequiredWithExceptions<
Base,
ToPick extends keyof Base,
ToNotMakeRequired extends ToPick,
> = Pick<Base, ToNotMakeRequired> &
Required<Omit<Pick<Base, ToPick>, ToNotMakeRequired>>;
// ## FUNCTION TYPES
// Can be either a static value, or a getter
// function for deriving a value
export type GetterOrStatic<PassedType, ReturnType> =
| ((p: PassedType) => ReturnType)
| ReturnType;
export type Comparator<T> = (a: T, b: T) => boolean;
export type SortComparator<T> = (a: T, b: T) => number;
export type Checker<T> = (item: T) => boolean;
export type Converter<Base, Target> = (base: Base) => Target;
export type Updater<T> = (a: T) => T;
// ## MISC TYPES
// When you want to specify auto-complete suggestions in
// the editor while also allowing values outside of the
// suggestions to be provided.
// NOTE: Trust me, the use of `Omit` is not a mistake, for
// whatever reasons you have to use it instead of `Exclude`
// for this to work with strings. Not sure about other types
// as I have only tested with strings.
type WithLooseAutocomplete<AcceptedType, Suggestions extends AcceptedType> =
| Suggestions
| Exclude<Omit<AcceptedType, Suggestions & keyof AcceptedType>, Suggestions>;
/**
* Use a map to essentially create a switch statement
* specifically for returning values based on the
* input's value.
*/
const valueSwitch = <Predicate, Return>(
value: Predicate,
cases: [Predicate, Return][]
): Return | undefined => new Map(cases).get(value);
/**
* Value switch that throws an error if the value
* is not found in the map.
*/
export const unsafeValueSwitch = <Predicate, Return>(
value: Predicate,
cases: [Predicate, Return][],
): Return => {
const foundValue = valueSwitch(value, cases);
if (foundValue === undefined)
throw Error(`Value '${value}' not found in cases`);
return foundValue;
};
export default valueSwitch;
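// Example usage (illustrative):
valueSwitch("error" as string, [
  ["success", "green"],
  ["error", "red"],
  ["pending", "orange"],
]); // => "red"
valueSwitch("unknown" as string, [["success", "green"]]); // => undefined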
export type ValueWithSimpleStub = number | string | any[];
export const withStringFallback = (value: string | undefined) => value ?? '';
export const withNumberFallback = (value: number | undefined) => value ?? 0;
export const withArrayFallback = <T>(value: T[] | undefined) =>
value ?? ([] as T[]);
// eslint-disable-next-line max-classes-per-file
import { z } from 'zod';
export type ZodSchemaMigration<
FromSchema extends z.ZodTypeAny,
ToSchema extends z.ZodTypeAny
> = {
to: ToSchema;
from: FromSchema;
up: (previousVersionData: z.infer<FromSchema>) => z.infer<ToSchema>;
down: (nextVersionData: z.infer<ToSchema>) => z.infer<FromSchema>;
};
const composeMigration = <
FromSchema extends z.ZodTypeAny,
ToSchema extends z.ZodTypeAny
>(
migration: ZodSchemaMigration<FromSchema, ToSchema>
) => migration;
const v1StateSchema = z.object({
person: z.object({
name: z.string(),
pet: z.object({
name: z.string(),
}),
}),
});
const v2StateSchema = z.object({
person: z.object({
name: z.string(),
}),
pet: z.object({
name: z.string(),
}),
});
export class MigrationController<FinalSchema extends z.ZodTypeAny> {
private legacyMigrations: ZodSchemaMigration<any, any>[] = [];
private finalMigration: ZodSchemaMigration<any, FinalSchema>;
// This constructor is only used by the
constructor(schema: FinalSchema);
constructor(
finalMigration: ZodSchemaMigration<any, FinalSchema>,
legacyMigrations?: ZodSchemaMigration<any, any>[]
);
constructor(
...params:
| [FinalSchema]
| [ZodSchemaMigration<any, FinalSchema>, ZodSchemaMigration<any, any>[]]
) {
if (params.length === 1) {
const [schema] = params;
this.finalMigration = {
from: z.any(),
to: schema,
up: (data) => data,
down: (data) => data,
};
} else {
const [finalMigration, previousMigrations] = params;
this.finalMigration = finalMigration;
this.legacyMigrations = previousMigrations;
}
}
public addMigration<NewFinalSchema extends z.ZodTypeAny>(
migration: Omit<ZodSchemaMigration<FinalSchema, NewFinalSchema>, 'from'>
) {
const newMigration: ZodSchemaMigration<FinalSchema, NewFinalSchema> = {
from: this.finalMigration.to,
...migration,
};
return new MigrationController<NewFinalSchema>(
newMigration,
this.legacyMigrations.concat(this.finalMigration)
);
}
public parse(data: any): z.infer<FinalSchema> {
const readyForFinalMigration = this.legacyMigrations.reduce(
(result, migration) => {
try {
const fromData = migration.from.parse(result);
return migration.up(fromData);
} catch (e) {
return result;
}
},
data
);
return this.finalMigration.to.parse(readyForFinalMigration);
}
}
const a = new MigrationController(v1StateSchema);
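// A hedged sketch of registering a v1 -> v2 migration with the sample schemas
// above (the exact `up`/`down` mappings shown here are assumptions):
const v2Controller = a.addMigration({
  to: v2StateSchema,
  up: (v1Data) => ({
    person: { name: v1Data.person.name },
    pet: { name: v1Data.person.pet.name },
  }),
  down: (v2Data) => ({
    person: { name: v2Data.person.name, pet: { name: v2Data.pet.name } },
  }),
});
// `v2Controller.parse(data)` upgrades v1-shaped data before validating it
// against the v2 schema.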