❯ pnpx ts-node stitch.ts
.../Library/pnpm/store/v3/tmp/dlx-88146 | +17 ++
Packages are hard linked from the content-addressable store to the virtual store.
  Content-addressable store is at: /Users/ac/Library/pnpm/store/v3
  Virtual store is at: ../../../Library/pnpm/store/v3/tmp/dlx-88146/node_modules/.pnpm
 WARN  Issues with peer dependencies found
.
└─┬ ts-node
  ├── ✕ missing peer @types/node@"*"
  └── ✕ missing peer typescript@>=2.7
Peer dependencies that should be installed:
  @types/node@"*"  typescript@>=2.7
.../Library/pnpm/store/v3/tmp/dlx-88146 | Progress: resolved 17, reused 17, downloaded 0, added 17, done
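Despite the warning, ts-node still runs and prints the stitched schema below. To satisfy the missing peers, assuming stitch.ts lives in a project with its own package.json, something like the following should work:

pnpm add -D typescript @types/node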
schema {
  query: Query
  mutation: Mutation
  subscription: Subscription
}

type Query {
  """
  fetch data from the table: "backend_stuff"
  """
  backend_stuff(
    """distinct select on columns"""
    distinct_on: [backend_stuff_select_column!]
    """limit the number of rows returned"""
    limit: Int
    """skip the first n rows. Use only with order_by"""
    offset: Int
    """sort the rows by one or more columns"""
    order_by: [backend_stuff_order_by!]
    """filter the rows returned"""
    where: backend_stuff_bool_exp
  ): [backend_stuff!]!
  """
  fetch aggregated fields from the table: "backend_stuff"
  """
  backend_stuff_aggregate(
    """distinct select on columns"""
    distinct_on: [backend_stuff_select_column!]
    """limit the number of rows returned"""
    limit: Int
    """skip the first n rows. Use only with order_by"""
    offset: Int
    """sort the rows by one or more columns"""
    order_by: [backend_stuff_order_by!]
    """filter the rows returned"""
    where: backend_stuff_bool_exp
  ): backend_stuff_aggregate!
  """fetch data from the table: "backend_stuff" using primary key columns"""
  backend_stuff_by_pk(id: Int!): backend_stuff
}

"""mutation root"""
type Mutation {
  """
  delete data from the table: "backend_stuff"
  """
  delete_backend_stuff(
    """filter the rows which have to be deleted"""
    where: backend_stuff_bool_exp!
  ): backend_stuff_mutation_response
  """
  delete single row from the table: "backend_stuff"
  """
  delete_backend_stuff_by_pk(id: Int!): backend_stuff
  """
  insert data into the table: "backend_stuff"
  """
  insert_backend_stuff(
    """the rows to be inserted"""
    objects: [backend_stuff_insert_input!]!
    """on conflict condition"""
    on_conflict: backend_stuff_on_conflict
  ): backend_stuff_mutation_response
  """
  insert a single row into the table: "backend_stuff"
  """
  insert_backend_stuff_one(
    """the row to be inserted"""
    object: backend_stuff_insert_input!
    """on conflict condition"""
    on_conflict: backend_stuff_on_conflict
  ): backend_stuff
  """
  update data of the table: "backend_stuff"
  """
  update_backend_stuff(
    """increments the numeric columns with given value of the filtered values"""
    _inc: backend_stuff_inc_input
    """sets the columns of the filtered rows to the given values"""
    _set: backend_stuff_set_input
    """filter the rows which have to be updated"""
    where: backend_stuff_bool_exp!
  ): backend_stuff_mutation_response
  """
  update single row of the table: "backend_stuff"
  """
  update_backend_stuff_by_pk(
    """increments the numeric columns with given value of the filtered values"""
    _inc: backend_stuff_inc_input
    """sets the columns of the filtered rows to the given values"""
    _set: backend_stuff_set_input
    pk_columns: backend_stuff_pk_columns_input!
  ): backend_stuff
}

type Subscription {
  """
  fetch data from the table: "backend_stuff"
  """
  backend_stuff(
    """distinct select on columns"""
    distinct_on: [backend_stuff_select_column!]
    """limit the number of rows returned"""
    limit: Int
    """skip the first n rows. Use only with order_by"""
    offset: Int
    """sort the rows by one or more columns"""
    order_by: [backend_stuff_order_by!]
    """filter the rows returned"""
    where: backend_stuff_bool_exp
  ): [backend_stuff!]!
  """
  fetch aggregated fields from the table: "backend_stuff"
  """
  backend_stuff_aggregate(
    """distinct select on columns"""
    distinct_on: [backend_stuff_select_column!]
    """limit the number of rows returned"""
    limit: Int
    """skip the first n rows. Use only with order_by"""
    offset: Int
    """sort the rows by one or more columns"""
    order_by: [backend_stuff_order_by!]
    """filter the rows returned"""
    where: backend_stuff_bool_exp
  ): backend_stuff_aggregate!
  """fetch data from the table: "backend_stuff" using primary key columns"""
  backend_stuff_by_pk(id: Int!): backend_stuff
}

"""Boolean expression to compare columns of type "Int". All fields are combined with logical 'AND'."""
input Int_comparison_exp {
  _eq: Int
  _gt: Int
  _gte: Int
  _in: [Int!]
  _is_null: Boolean
  _lt: Int
  _lte: Int
  _neq: Int
  _nin: [Int!]
}

"""
columns and relationships of "backend_stuff"
"""
type backend_stuff {
  created_at: timestamptz!
  id: Int!
}

"""
aggregated selection of "backend_stuff"
"""
type backend_stuff_aggregate {
  aggregate: backend_stuff_aggregate_fields
  nodes: [backend_stuff!]!
}

"""
aggregate fields of "backend_stuff"
"""
type backend_stuff_aggregate_fields {
  avg: backend_stuff_avg_fields
  count(columns: [backend_stuff_select_column!], distinct: Boolean): Int!
  max: backend_stuff_max_fields
  min: backend_stuff_min_fields
  stddev: backend_stuff_stddev_fields
  stddev_pop: backend_stuff_stddev_pop_fields
  stddev_samp: backend_stuff_stddev_samp_fields
  sum: backend_stuff_sum_fields
  var_pop: backend_stuff_var_pop_fields
  var_samp: backend_stuff_var_samp_fields
  variance: backend_stuff_variance_fields
}

"""aggregate avg on columns"""
type backend_stuff_avg_fields {
  id: Float
}

"""Boolean expression to filter rows from the table "backend_stuff". All fields are combined with a logical 'AND'."""
input backend_stuff_bool_exp {
  _and: [backend_stuff_bool_exp!]
  _not: backend_stuff_bool_exp
  _or: [backend_stuff_bool_exp!]
  created_at: timestamptz_comparison_exp
  id: Int_comparison_exp
}

"""
unique or primary key constraints on table "backend_stuff"
"""
enum backend_stuff_constraint {
  """unique or primary key constraint"""
  backend_stuff_pkey
}

"""
input type for incrementing numeric columns in table "backend_stuff"
"""
input backend_stuff_inc_input {
  id: Int
}

"""
input type for inserting data into table "backend_stuff"
"""
input backend_stuff_insert_input {
  created_at: timestamptz
  id: Int
}

"""aggregate max on columns"""
type backend_stuff_max_fields {
  created_at: timestamptz
  id: Int
}

"""aggregate min on columns"""
type backend_stuff_min_fields {
  created_at: timestamptz
  id: Int
}

"""
response of any mutation on the table "backend_stuff"
"""
type backend_stuff_mutation_response {
  """number of rows affected by the mutation"""
  affected_rows: Int!
  """data from the rows affected by the mutation"""
  returning: [backend_stuff!]!
}

"""
on conflict condition type for table "backend_stuff"
"""
input backend_stuff_on_conflict {
  constraint: backend_stuff_constraint!
  update_columns: [backend_stuff_update_column!]! = []
  where: backend_stuff_bool_exp
}

"""Ordering options when selecting data from "backend_stuff"."""
input backend_stuff_order_by {
  created_at: order_by
  id: order_by
}

"""primary key columns input for table: backend_stuff"""
input backend_stuff_pk_columns_input {
  id: Int!
}

"""
select columns of table "backend_stuff"
"""
enum backend_stuff_select_column {
  """column name"""
  created_at
  """column name"""
  id
}

"""
input type for updating data in table "backend_stuff"
"""
input backend_stuff_set_input {
  created_at: timestamptz
  id: Int
}

"""aggregate stddev on columns"""
type backend_stuff_stddev_fields {
  id: Float
}

"""aggregate stddev_pop on columns"""
type backend_stuff_stddev_pop_fields {
  id: Float
}

"""aggregate stddev_samp on columns"""
type backend_stuff_stddev_samp_fields {
  id: Float
}

"""aggregate sum on columns"""
type backend_stuff_sum_fields {
  id: Int
}

"""
update columns of table "backend_stuff"
"""
enum backend_stuff_update_column {
  """column name"""
  created_at
  """column name"""
  id
}

"""aggregate var_pop on columns"""
type backend_stuff_var_pop_fields {
  id: Float
}

"""aggregate var_samp on columns"""
type backend_stuff_var_samp_fields {
  id: Float
}

"""aggregate variance on columns"""
type backend_stuff_variance_fields {
  id: Float
}

"""column ordering options"""
enum order_by {
  """in ascending order, nulls last"""
  asc
  """in ascending order, nulls first"""
  asc_nulls_first
  """in ascending order, nulls last"""
  asc_nulls_last
  """in descending order, nulls first"""
  desc
  """in descending order, nulls first"""
  desc_nulls_first
  """in descending order, nulls last"""
  desc_nulls_last
}

scalar timestamptz

"""Boolean expression to compare columns of type "timestamptz". All fields are combined with logical 'AND'."""
input timestamptz_comparison_exp {
  _eq: timestamptz
  _gt: timestamptz
  _gte: timestamptz
  _in: [timestamptz!]
  _is_null: Boolean
  _lt: timestamptz
  _lte: timestamptz
  _neq: timestamptz
  _nin: [timestamptz!]
}
stitch.ts (offline stitching):
import { stitchSchemas } from '@graphql-tools/stitch';
import { introspectSchema } from '@graphql-tools/wrap';
import { AsyncExecutor, printSchemaWithDirectives } from '@graphql-tools/utils';
import { IncomingMessage } from 'http';
import { GraphQLSchema, print } from 'graphql';
import { fetch } from 'cross-fetch';

export interface IGraphQLContext {
  req: IncomingMessage;
}

export interface IPrevContext {
  graphqlContext: IGraphQLContext;
}

// Builds a remote schema executor function,
// customize any way that you need (auth, headers, etc).
// Expects to receive an object with "document" and "variables" params,
// and asynchronously returns a JSON response from the remote.
function makeRemoteExecutor(url: string) {
  const executor: AsyncExecutor = async ({ document, variables, context }) => {
    const query = typeof document === 'string' ? document : print(document);
    const fetchResult = await fetch(url, {
      method: 'POST',
      // Use the admin secret when the context provides one,
      // otherwise forward the incoming request's headers.
      headers: context.authHeader
        ? {
            'x-hasura-admin-secret': context.authHeader,
            'Content-Type': 'application/json',
          }
        : context.req.headers,
      body: JSON.stringify({ query, variables }),
    });
    return fetchResult.json();
  };

  return executor;
}

async function main() {
  // The "remote schema" here is really just an executor that forwards
  // operations to the Hasura endpoint; introspectSchema uses it below.
  let remoteExecutor: any = null;
  try {
    remoteExecutor = makeRemoteExecutor('http://localhost:8081/v1/graphql');
  } catch (e) {
    console.log(e);
  }

  const adminContext = {
    authHeader: 'myadminsecretkey',
  };

  const result = await stitchSchemas({
    subschemas: [
      {
        // 1. Introspect a remote schema. Simple, but there are caveats:
        //    - Remote server must enable introspection.
        //    - Custom directives are not included in introspection.
        schema: await introspectSchema(remoteExecutor, adminContext),
        executor: remoteExecutor,
      },
      // {
      //   // 2. Incorporate a locally-executable subschema.
      //   // No need for a remote executor!
      //   // Note that the gateway still proxies through
      //   // to this same underlying executable schema instance.
      //   schema,
      // },
    ],
  });

  console.log(printSchemaWithDirectives(result));
}

main();
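main() only prints the stitched SDL. As a minimal sketch of serving a request through that same schema, assuming the Hasura endpoint and admin-secret context shown above and a hypothetical query against the backend_stuff table, one could execute it directly with graphql-js:

import { graphql } from 'graphql';

// Hypothetical follow-up: run one query through the stitched schema.
// The remote executor forwards it to Hasura with the admin-secret header,
// because the contextValue below carries authHeader.
async function runSampleQuery(gatewaySchema: GraphQLSchema) {
  const result = await graphql({
    schema: gatewaySchema,
    source: `
      query {
        backend_stuff(limit: 5, order_by: { created_at: desc }) {
          id
          created_at
        }
      }
    `,
    contextValue: { authHeader: 'myadminsecretkey' },
  });
  console.log(JSON.stringify(result, null, 2));
}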