A type-level GraphQL lexer, parser, and TypeScript converter.
// Utility
type GetFirstCharacter<Text extends string> = Text extends `${infer First}${infer Rest}` ? First : never;
type StringStartsWith<Source extends string, Char extends string> = Source extends `${infer First}${infer Rest}`
  ? First extends Char
    ? true
    : false
  : false;
type SliceStringFirst<Source extends string> = Source extends `${infer First}${infer Rest}`
  ? Rest
  : never;
type Digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9";
type Char = "a" | "A" | "b" | "B" | "c" | "C" | "d" | "D" | "e" | "E" | "f" | "F" | "g" | "G" | "h" | "H" | "i" | "I" | "j" | "J" | "k" | "K" | "l" | "L" | "m" | "M" | "n" | "N" | "o" | "O" | "p" | "P" | "q" | "Q" | "r" | "R" | "s" | "S" | "t" | "T" | "u" | "U" | "v" | "V" | "w" | "W" | "x" | "X" | "y" | "Y" | "z" | "Z" | "_";
type CharRest = Char | Digit;
type Whitespace = "\t" | " " | "";
type Newline = "\n" | "\r";
type Skip = Whitespace | Newline | ";"; // Remove ';' if there are any issues
type Delimiter = "{" | "}" | "(" | ")" | "[" | "]" | ":" | "," | "." | "!";
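// Illustrative sanity checks for the string utilities above (the alias names
// are examples only, not part of the original gist; expected results are
// assumptions, not verified compiler output):
type _UtilExample1 = StringStartsWith<"query", Char>; // expected: true
type _UtilExample2 = SliceStringFirst<"query">;       // expected: "uery"
type _UtilExample3 = GetFirstCharacter<"">;           // expected: never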
// Token Types
type TokenNumber<Source extends string = string> = { type: "number", value: Source };
type TokenIdentifier<Source extends string = string> = { type: "identifier", value: Source };
type TokenString<Source extends string = string> = { type: "string", value: Source };
type TokenDelimiter<Source extends Delimiter = any> = { type: "delimiter", value: Source };
type TokenEOF = { type: "eof", value: "-EOF-" };
type Token = TokenNumber | TokenIdentifier | TokenString | TokenDelimiter | TokenEOF;
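// Tokens are plain object types; for example (illustrative alias only):
type _TokenExample = TokenIdentifier<"user">; // { type: "identifier", value: "user" }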
// Tokenizer
type TokenizerError<Message extends string> = { success: false, message: Message };
type TokenizeNumber<Source extends string, Last extends string = ""> = Source extends `${infer Current}${infer Rest}`
  ? Current extends Digit
    ? TokenizeNumber<Rest, `${Last}${Current}`>
    : Last extends "" ? never : [ TokenNumber<Last>, `${Current}${Rest}` ]
  : Last extends "" ? never : [ TokenNumber<Last>, "" ]
type TokenizeIdentifier<Source extends string, Last extends string = ""> = Source extends `${infer Current}${infer Rest}`
  ? Last extends ""
    ? Current extends Char
      ? TokenizeIdentifier<Rest, `${Last}${Current}`>
      : Last extends "" ? never : [ TokenIdentifier<Last>, `${Current}${Rest}` ]
    : Current extends CharRest
      ? TokenizeIdentifier<Rest, `${Last}${Current}`>
      : Last extends "" ? never : [ TokenIdentifier<Last>, `${Current}${Rest}` ]
  : Last extends "" ? never : [ TokenIdentifier<Last>, "" ]
type TokenizeString<Source extends string, Last extends string = ""> = Source extends `${infer Current}${infer Rest}`
  ? Last extends ""
    // String needs to be opened
    ? Current extends '"'
      ? TokenizeString<Rest, `${Last}${Current}`>
      : never
    // String has already been opened
    : Current extends '"'
      ? [ TokenString<`${Last}${Current}`>, Rest ]
      : TokenizeString<Rest, `${Last}${Current}`>
  : never
type Tokenize<Source extends string, Stack extends Token[] = []> =
  StringStartsWith<Source, Skip> extends true
    ? Tokenize<SliceStringFirst<Source>, Stack>
    : TokenizeNumber<Source> extends never
      ? TokenizeIdentifier<Source> extends never
        ? TokenizeString<Source> extends never
          ? GetFirstCharacter<Source> extends never
            ? Source extends ""
              // End of file, so return stack and eof token
              ? [ ...Stack, TokenEOF ]
              // No other token types so encountered an unexpected character
              : TokenizerError<`unexpected character '${GetFirstCharacter<Source>}'`>
            : GetFirstCharacter<Source> extends Delimiter
              // Tokenize delimiters
              ? Tokenize<SliceStringFirst<Source>, [ ...Stack, TokenDelimiter<GetFirstCharacter<Source>> ]>
              // Didn't match any tokens
              : Source extends ""
                // End of file, so return stack
                ? Stack
                // No other token types so encountered an unexpected character
                : TokenizerError<`unexpected character '${GetFirstCharacter<Source>}'`>
          // Tokenize strings
          : TokenizeString<Source> extends [ infer Tok, infer Rest ]
            ? Rest extends string
              ? Tok extends Token
                ? Tokenize<Rest, [ ...Stack, Tok ]>
                : "lexer@error#9"
              : "lexer@error#8"
            : "lexer@error#7"
        // Tokenize identifiers
        : TokenizeIdentifier<Source> extends [ infer Tok, infer Rest ]
          ? Rest extends string
            ? Tok extends Token
              ? Tokenize<Rest, [ ...Stack, Tok ]>
              : "lexer@error#6"
            : "lexer@error#5"
          : "lexer@error#4"
      // Tokenize numbers
      : TokenizeNumber<Source> extends [ infer Tok, infer Rest ]
        ? Rest extends string
          ? Tok extends Token
            ? Tokenize<Rest, [ ...Stack, Tok ]>
            : "lexer@error#3"
          : "lexer@error#2"
        : "lexer@error#1"
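// A rough sketch of the tokenizer in action (the alias name and the expected
// tuple below are illustrative assumptions, not verified compiler output):
type _TokensExample = Tokenize<"type User { id: Int }">;
// expected to resolve to roughly:
// [ TokenIdentifier<"type">, TokenIdentifier<"User">, TokenDelimiter<"{">,
//   TokenIdentifier<"id">, TokenDelimiter<":">, TokenIdentifier<"Int">,
//   TokenDelimiter<"}">, TokenEOF ]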
// AST Types
type BlockDefinitionNode<Name extends string = any, Definitions = BlockTypeNode> = { type: "BlockDefinition", name: Name, definitions: Definitions };
type BlockTypeNode<Definitions extends BlockDefinitions = any> = { type: "BlockType", definitions: Definitions };
type BlockDefinitions = { fields: Record<string, ASTNode>, frags: FragmentSpreadNode[] };
type NullabilityModifierNode<Node extends ASTNode = any> = { type: "NonNullableModifier", node: Node };
type ProgramNode<Children extends ASTNode[] = any> = { type: "Program", children: Children };
type FragmentSpreadNode<Name extends string = any> = { type: "FragmentSpread", name: Name };
type ArrayModifierNode<Node extends ASTNode = any> = { type: "ArrayModifier", node: Node };
type IdentifierNode<Name extends string = any> = { type: "Identifier", name: Name };
type ASTNode = BlockDefinitionNode | BlockTypeNode | IdentifierNode | ProgramNode | FragmentSpreadNode | ArrayModifierNode | NullabilityModifierNode;
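// How these nodes compose (illustrative alias only): the parser below uses a
// postfix list syntax, so a field written as `tags: String[]!` is expected to
// parse to roughly the node shown here, while plain `id: Int` becomes
// IdentifierNode<"Int">.
type _FieldNodeExample = NullabilityModifierNode<ArrayModifierNode<IdentifierNode<"String">>>;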
// Parser Utility
type TokenToString<Tok extends Token> = `'${Ensure<Tok, Token>["value"]}' (${Ensure<Tok, Token>["type"]})`;
// Parser
type ParserError<Message extends string> = { success: false, message: Message };
// Wrapping both sides in tuples keeps this conditional non-distributive, so a
// missing `Expected` (never) still yields the short error message instead of never.
type ParserUnexpectedToken<Curr, Expected = never> = [ Expected ] extends [ never ]
  ? ParserError<`unexpected token ${TokenToString<Ensure<Curr, Token>>}`>
  : ParserError<`unexpected token ${TokenToString<Ensure<Curr, Token>>}, expected ${TokenToString<Ensure<Expected, Token>>}`>
type ParseFragmentSpread<Tokens extends Token[]> = null
type ParseBlockType<Tokens extends Token[], Stack extends BlockDefinitions = { fields: {}, frags: [] }> = Tokens extends [ infer State, ...infer Rest ]
  // peek() === { type: "delimiter", value: "}" }
  ? State extends TokenDelimiter<"}">
    ? [ Rest, BlockTypeNode<Stack> ]
    : State extends TokenIdentifier<infer Name>
      ? Rest extends [ infer State, ...infer Rest ]
        // peek() === { type: "delimiter", value: ":" }
        ? State extends TokenDelimiter<":">
          ? ParseType<Ensure<Rest, Token[]>> extends ParserError<any>
            ? ParseType<Ensure<Rest, Token[]>>
            : ParseBlockType<Ensure<ParseType<Ensure<Rest, Token[]>>[0], Token[]>, {
                fields: Stack["fields"] & { [K in Name]: Ensure<ParseType<Ensure<Rest, Token[]>>[1], ASTNode> },
                frags: Stack["frags"]
              }>
          : State extends TokenDelimiter<"{">
            ? ParseBlockType<Ensure<Rest, Token[]>> extends ParserError<any>
              ? ParseBlockType<Ensure<Rest, Token[]>>
              : ParseBlockType<Ensure<ParseBlockType<Ensure<Rest, Token[]>>[0], Token[]>, {
                  fields: Stack["fields"] & { [K in Name]: ParseBlockType<Ensure<Rest, Token[]>>[1] },
                  frags: Stack["frags"]
                }>
            : ParserUnexpectedToken<State, TokenDelimiter<":">>
        : "parser@error#8"
      : State extends TokenDelimiter<".">
        ? Rest extends [ TokenDelimiter<".">, TokenDelimiter<".">, infer State, ...infer Rest ]
          // ? ParseFragmentSpread<Ensure<Rest, Token[]>> extends ParserError<any>
          //   ? ParseFragmentSpread<Ensure<Rest, Token[]>>
          //   : ParseBlockType<ParseFragmentSpread<Ensure<Rest, Token[]>>[0], Stack & { [ K in Name ]: ParseFragmentSpread<Ensure<Rest, Token[]>>[1] }>
          ? State extends TokenIdentifier<infer Name>
            ? ParseBlockType<Ensure<Rest, Token[]>, {
                fields: Stack["fields"],
                frags: [ ...Stack["frags"], FragmentSpreadNode<Name> ]
              }>
            : ParserUnexpectedToken<State, TokenIdentifier<"*">>
          : ParserUnexpectedToken<State, TokenDelimiter<".">>
        : ParserUnexpectedToken<State, TokenIdentifier<"*"> | TokenDelimiter<".">>
  : "parser@error#7"
type ParseType<Tokens extends Token[]> = Tokens extends [ infer State, ...infer Rest ]
  // peek() === { type: "delimiter", value: "{" }
  ? State extends TokenDelimiter<"{">
    ? ParseBlockType<Ensure<Rest, Token[]>>
    : State extends TokenIdentifier<infer Value>
      ? Rest extends [ TokenDelimiter<"[">, TokenDelimiter<"]">, ...infer Rest ]
        ? Rest extends [ TokenDelimiter<"!">, ...infer Rest ]
          ? [ Rest, NullabilityModifierNode<ArrayModifierNode<IdentifierNode<Value>>> ]
          : [ Rest, ArrayModifierNode<IdentifierNode<Value>> ]
        : Rest extends [ TokenDelimiter<"!">, ...infer Rest ]
          ? [ Rest, NullabilityModifierNode<IdentifierNode<Ensure<State, TokenIdentifier>["value"]>> ]
          : [ Rest, IdentifierNode<Ensure<State, TokenIdentifier>["value"]> ]
      : ParserUnexpectedToken<State>
  : "parser@error#6"
type Ensure<T, U> = T extends U ? T : never;
type ParseTypeDefinition<Tokens extends Token[]> = Tokens extends [ infer State, ...infer Rest ]
  // peek() === { type: "identifier", value: "*" } !! value not checked here
  ? State extends TokenIdentifier<infer KW>
    ? Rest extends [ infer State, ...infer Rest ]
      // peek() === { type: "identifier", value: * }
      ? State extends TokenIdentifier<infer Name>
        ? Rest extends [ infer State, ...infer Rest ]
          ? State extends TokenDelimiter<"{">
            ? ParseType<Ensure<[ State, ...Rest ], Token[]>> extends ParserError<any>
              ? ParseType<Ensure<[ State, ...Rest ], Token[]>>
              : [ ParseType<Ensure<[ State, ...Rest ], Token[]>>[0], BlockDefinitionNode<Name, ParseType<Ensure<[ State, ...Rest ], Token[]>>[1]> ]
            : ParserUnexpectedToken<State, TokenDelimiter<"{">>
          : "parser@error#5"
        : ParserUnexpectedToken<State, TokenIdentifier<"*">>
      : "parser@error#3"
    : "parser@error#2"
  : "parser@error#1"
type ParseFragmentDefinition<Tokens extends Token[]> = Tokens extends [ infer State, ...infer Rest ]
  // peek() === { type: "identifier", value: "*" } !! value not checked here
  ? State extends TokenIdentifier<infer KW>
    ? Rest extends [ infer State, ...infer Rest ]
      // peek() === { type: "identifier", value: * }
      ? State extends TokenIdentifier<infer Name>
        ? Rest extends [ infer State, ...infer Rest ]
          // peek() === { type: "identifier", value: "on" }
          ? State extends TokenIdentifier<"on">
            ? Rest extends [ infer State, ...infer Rest ]
              ? State extends TokenIdentifier<infer NameOn>
                ? Rest extends [ infer State, ...infer Rest ]
                  ? State extends TokenDelimiter<"{">
                    ? ParseType<Ensure<[ State, ...Rest ], Token[]>> extends ParserError<any>
                      ? ParseType<Ensure<[ State, ...Rest ], Token[]>>
                      : [ ParseType<Ensure<[ State, ...Rest ], Token[]>>[0], BlockDefinitionNode<Name, ParseType<Ensure<[ State, ...Rest ], Token[]>>[1]> ]
                    : ParserUnexpectedToken<State, TokenDelimiter<"{">>
                  : "parser@error#14"
                : ParserUnexpectedToken<State, TokenIdentifier<"*">>
              : "parser@error#13"
            : ParserUnexpectedToken<State, TokenIdentifier<"on">>
          : "parser@error#12"
        : ParserUnexpectedToken<State, TokenIdentifier<"*">>
      : "parser@error#11"
    : "parser@error#10"
  : "parser@error#9"
type ParseProgram<Tokens extends Token[], Children extends ASTNode[] = []> = Tokens extends [ infer Current, ...infer Rest ]
  ? Current extends TokenEOF
    ? ProgramNode<Children>
    : Current extends TokenIdentifier<"type">
      ? ParseTypeDefinition<Tokens> extends ParserError<any>
        ? ParseTypeDefinition<Tokens>
        : ParseProgram<Ensure<ParseTypeDefinition<Tokens>[0], Token[]>, [ ...Children, Ensure<ParseTypeDefinition<Tokens>[1], BlockDefinitionNode> ]>
      : Current extends TokenIdentifier<"fragment">
        ? ParseFragmentDefinition<Tokens> extends ParserError<any>
          ? ParseFragmentDefinition<Tokens>
          : ParseProgram<Ensure<ParseFragmentDefinition<Tokens>[0], Token[]>, [ ...Children, Ensure<ParseFragmentDefinition<Tokens>[1], BlockDefinitionNode> ]>
        : ParserUnexpectedToken<Current, TokenIdentifier<"type"> | TokenIdentifier<"fragment">>
  : ParserError<`unexpected end of file`>
type Parse<Tokens extends Token[]> = ParseProgram<Tokens>
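// A rough sketch of the parser output (illustrative alias; the expected shape
// is an assumption, not verified compiler output):
type _AstExample = Parse<Ensure<Tokenize<"type User { id: Int }">, Token[]>>;
// expected to resolve to roughly:
// ProgramNode<[ BlockDefinitionNode<"User",
//   BlockTypeNode<{ fields: { id: IdentifierNode<"Int"> }, frags: [] }>> ]>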
// TS Converter Utility
type FindFragment<Children extends ASTNode[], Name extends string> = Children extends [ infer First, ...infer Rest ]
  ? First extends { name: Name, definitions: infer V }
    ? { name: Name, definitions: V }
    : FindFragment<Ensure<Rest, ASTNode[]>, Name>
  : never
type FindFragments<Children extends ASTNode[], Fragments extends FragmentSpreadNode[]> = {
  [K in keyof Fragments]: FindFragment<Children, Fragments[K]["name"]>["definitions"]
}
type ConvertFragmentList<PNode extends ProgramNode, Fragments extends BlockTypeNode[], Already extends Record<string, any> = {}> = Fragments extends [ infer Frag, ...infer Rest ]
  ? ConvertFragmentList<PNode, Ensure<Rest, BlockTypeNode[]>, Already & ConvertBlockTypeNode<PNode, Ensure<Frag, BlockTypeNode>>>
  : Already
// TS Converter
type ConvertIdentifierNode<PNode extends ProgramNode, Node extends IdentifierNode> = Node["name"] extends "String"
  ? string
  : Node["name"] extends "Float"
    ? number
    : Node["name"] extends "Int"
      ? number
      : Node["name"] extends "Boolean"
        ? boolean
        : unknown;
// Every field is widened with `| null`, except non-nullable (`!`) fields, which
// are handled first so the NonNullable<...> result is not immediately re-widened.
type ConvertBlockTypeField<PNode extends ProgramNode, Item extends BlockDefinitions["fields"][number]> = Item extends NullabilityModifierNode
  ? ConvertNullabilityModifierNode<PNode, Item>
  : (Item extends BlockTypeNode
      ? ConvertBlockTypeNode<PNode, Item>
      : Item extends IdentifierNode
        ? ConvertIdentifierNode<PNode, Item>
        : Item extends ArrayModifierNode
          ? ConvertArrayModifierNode<PNode, Item>
          : unknown) | null
type ConvertBlockTypeFields<PNode extends ProgramNode, Items extends BlockDefinitions["fields"]> = {
  [K in keyof Items]: ConvertBlockTypeField<PNode, Items[K]>
}
type ConvertArrayModifierNode<PNode extends ProgramNode, Node extends ArrayModifierNode> = ConvertBlockTypeField<PNode, Node["node"]>[]
type ConvertNullabilityModifierNode<PNode extends ProgramNode, Node extends NullabilityModifierNode> = NonNullable<ConvertBlockTypeField<PNode, Node["node"]>>
type ConvertBlockTypeFrags<PNode extends ProgramNode, Frags extends BlockDefinitions["frags"]> = ConvertFragmentList<PNode, FindFragments<PNode["children"], Frags>>
type ConvertBlockTypeNode<PNode extends ProgramNode, Node extends BlockTypeNode> = ConvertBlockTypeFields<PNode, Node["definitions"]["fields"]> & ConvertBlockTypeFrags<PNode, Node["definitions"]["frags"]>;
type ConvertFragmentSpreadNode<PNode extends ProgramNode, Node extends FragmentSpreadNode> =
  ConvertBlockTypeNode<PNode, FindFragment<PNode["children"], Node["name"]>["definitions"]>
type ConvertBlockDefinitionNode<PNode extends ProgramNode, Node extends BlockDefinitionNode> = ConvertBlockTypeNode<PNode, Node["definitions"]>
type Convert<Node extends ProgramNode> = Node["children"][0] extends BlockDefinitionNode
  ? ConvertBlockDefinitionNode<Node, Node["children"][0]>
  : unknown
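// A rough sketch of the converter output (illustrative alias; the expected
// object type is an assumption, not verified compiler output):
type _ConvertExample = Convert<Ensure<Parse<Ensure<Tokenize<"type User { id: Int name: String }">, Token[]>>, ProgramNode>>;
// expected to resolve to roughly: { id: number | null, name: string | null }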
// Main types
export type Graphql<Source extends string> = Tokenize<Source> extends TokenizerError<any>
  ? Tokenize<Source>
  : Parse<Ensure<Tokenize<Source>, Token[]>> extends ParserError<any>
    ? Parse<Ensure<Tokenize<Source>, Token[]>>
    : Convert<Ensure<Parse<Ensure<Tokenize<Source>, Token[]>>, ProgramNode>>
Usage:
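A minimal sketch of how the exported Graphql type could be used, assuming the types above are in scope; the schema string and the expected result are illustrative assumptions, not verified compiler output.

type User = Graphql<`
  type User {
    id: Int!
    name: String
  }
`>;
// expected to resolve to roughly: { id: number; name: string | null }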