Created
April 9, 2020 12:17
-
-
Save natac13/d35ef1f23a0769156c58a21198595a95 to your computer and use it in GitHub Desktop.
Mongoose graphql files
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import R from 'ramda' | |
| import mongoose from 'mongoose' | |
/**
 * Builds a plain object suitable for a mongoose create call from GraphQL
 * mutation args, keeping only keys that exist on the model's schema paths.
 *
 * @param {Object} args - incoming GraphQL arguments (possibly nested).
 * @param {Object} model - mongoose model (or schema path exposing `.schema.paths`).
 * @param {Object} options - passed through to recursive calls (currently unused).
 * @returns {Object} new object containing only schema-backed fields, with
 *   ObjectId strings converted to mongoose ObjectIds.
 */
export function newObjectFromArgs(args, model, options) {
  const fields = R.pathOr({}, ['schema', 'paths'])(model)
  return R.reduce((acc, k) => {
    const field = fields[k]
    // Drop keys that do not exist on the schema.
    if (R.isNil(field)) {
      return acc
    }
    if (field.instance === 'Date') {
      // Dates pass through as-is; mongoose casts them on save.
      return { ...acc, [k]: args[k] }
    }
    if (field.instance === 'Array') {
      // Recurse into each array item using the array path as the "model".
      return {
        ...acc,
        [k]: R.map((item) => newObjectFromArgs(item, field, options))(args[k])
      }
    }
    // BUG FIX: mongoose <= 5 reports 'ObjectID' (as the filter helpers in
    // this project expect) while mongoose >= 6 reports 'ObjectId'; the old
    // check only matched 'ObjectId', so casting silently never happened on
    // mongoose 5. Accept both spellings.
    if (field.instance === 'ObjectID' || field.instance === 'ObjectId') {
      return { ...acc, [k]: mongoose.Types.ObjectId(args[k]) }
    }
    if (field.schema) {
      // Single nested subdocument: recurse with the sub-schema path.
      return { ...acc, [k]: newObjectFromArgs(args[k], field, options) }
    }
    return { ...acc, [k]: args[k] }
  }, {})(R.keys(args))
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import R from 'ramda' | |
/**
 * Recursively builds a flat request map ({ fieldName: 1 | nestedMap }) from a
 * GraphQL field node's selection set, expanding fragment spreads.
 *
 * NOTE: `result` is intentionally mutated in place so fragment spreads can
 * merge their selections into the same accumulator across recursive calls.
 * (Two earlier commented-out implementations were removed.)
 *
 * @param {Object} fieldNode - GraphQL AST field node (or fragment definition).
 * @param {Object} fragments - map of fragment name -> fragment definition.
 * @param {Object} [result={}] - accumulator, mutated in place.
 * @returns {Object} the populated accumulator.
 */
function getSelections(fieldNode, fragments, result = {}) {
  const selections = R.pathOr([], ['selectionSet', 'selections'])(fieldNode)
  R.forEach((sel) => {
    if (sel.kind === 'FragmentSpread') {
      // Expand the named fragment into the same accumulator.
      getSelections(fragments[sel.name.value], fragments, result)
    } else {
      // Leaf fields map to 1; composite fields recurse into a nested map.
      // eslint-disable-next-line no-param-reassign
      result[sel.name.value] = R.isNil(sel.selectionSet)
        ? 1
        : { ...getSelections(sel, fragments) }
    }
  })(selections)
  return result
}
/**
 * Walks the GraphQL AST down the dotted `queryName` path and returns both the
 * field node found there and its requested-field map.
 *
 * @param {Object} ast - GraphQL resolve info (or a field node when recursing).
 * @param {string} [queryName] - dotted path, e.g. 'course.students'.
 * @returns {{ requestMap: Object, ast: (Object|null) }} empty map / null ast
 *   when the path cannot be resolved.
 */
export function getNestedQueryInfo(ast, queryName) {
  let fieldNode = ast.fieldNodes
    ? ast.fieldNodes.find((fn) => R.equals(fn.kind, 'Field'))
    : ast
  if (queryName) {
    for (const path of queryName.split('.')) {
      // ROBUSTNESS: also stop when the current node has no selectionSet
      // (a leaf reached before the path was exhausted) — previously this
      // threw a TypeError on `.selections`.
      if (!fieldNode || !fieldNode.selectionSet) {
        fieldNode = undefined
        break
      }
      fieldNode = fieldNode.selectionSet.selections.find(
        (fn) =>
          R.equals(fn.kind, 'Field') &&
          fn.name &&
          R.pathEq(['name', 'value'], path)(fn)
      )
    }
  }
  if (fieldNode) {
    return {
      requestMap: getSelections(fieldNode, ast.fragments),
      ast: fieldNode
    }
  }
  return {
    requestMap: {},
    ast: null
  }
}
/**
 * Returns the map of requested fields for `queryName`, optionally forcing
 * extra field names into the result (fields later resolvers always need).
 *
 * @param {Object} ast - GraphQL resolve info.
 * @param {string} queryName - dotted path to the query field.
 * @param {string[]} [force] - field names to include regardless of selection.
 * @returns {Object} { fieldName: 1 | nestedMap }.
 */
export function parseRequestedFields(ast, queryName, force) {
  const { requestMap } = getNestedQueryInfo(ast, queryName)
  if (!force) {
    return { ...requestMap }
  }
  const forced = {}
  for (const field of force) {
    forced[field] = 1
  }
  return {
    ...requestMap,
    ...forced
  }
}
/**
 * Converts a request map (from getSelections) into a flat mongo $project
 * document, validating each requested field against the model's schema paths.
 * Unknown fields (e.g. virtuals) are silently dropped.
 *
 * @param {Object} requestMap - { field: 1 | nestedRequestMap }.
 * @param {Object} model - mongoose model or schema path with `schema.paths`.
 * @param {Object} [args={}] - currently unused; kept for caller parity.
 * @param {Object} extrasPackets - currently unused; kept for caller parity.
 * @param {string} [currentObject=''] - dotted prefix accumulated during recursion.
 * @returns {Object} flat projection, e.g. { name: 1, 'meta.x': 1 }.
 */
function getProjectionObject(
  requestMap,
  model,
  args = {},
  extrasPackets,
  currentObject = ''
) {
  return Object.keys(requestMap).reduce((acc, field) => {
    const entry = model.schema.paths[field]
    // Skip anything the schema does not know about.
    if (!entry) {
      return acc
    }
    const value = requestMap[field]
    // BUG FIX: the nested recursion previously passed the bare `field` as the
    // prefix, so selections nested two or more levels deep produced keys like
    // 'b.c' instead of the full dotted path 'a.b.c' mongo requires.
    const dottedName = currentObject ? `${currentObject}.${field}` : field
    if (value === 1) {
      return { ...acc, [dottedName]: 1 }
    }
    if (typeof value === 'object' && value !== null) {
      // Nested selection: flatten into dotted keys.
      return {
        ...acc,
        ...getProjectionObject(value, entry, {}, null, dottedName)
      }
    }
    return acc
  }, {})
}
// Public wrapper around getProjectionObject: builds the $project document of
// a mongo aggregation from the requested-field map.
export function getMongoProjection(requestMap, model, args, extrasPackets) {
  const projection = getProjectionObject(requestMap, model, args, extrasPackets)
  return projection
}
/**
 * Combines the requested-field map with its mongo projection.
 * `extrasPackets` (per-relationship sub-queries) is currently always empty;
 * the legacy commented-out relationship-walking code was removed.
 *
 * @param {Object} ast - GraphQL resolve info (unused for now; kept for callers).
 * @param {Object} requestMap - requested fields from parseRequestedFields.
 * @param {Object} model - mongoose model.
 * @param {Object} [args={}] - GraphQL args, forwarded to projection building.
 * @returns {{ extrasPackets: Object, requestMap: Object, $project: (Object|null) }}
 */
export function parseRequestedHierarchy(ast, requestMap, model, args = {}) {
  const extrasPackets = {}
  return {
    extrasPackets,
    requestMap,
    // null (not {}) when nothing was requested, so callers can skip the
    // $project stage entirely.
    $project: R.isEmpty(requestMap)
      ? null
      : getMongoProjection(requestMap, model, args, extrasPackets)
  }
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| /* eslint-disable no-param-reassign,no-return-assign */ | |
| import R from 'ramda' | |
| import mongoose from 'mongoose' | |
| import escapeStringRegexp from 'escape-string-regexp' | |
| import { parseRequestedFields, parseRequestedHierarchy } from './projectHelpers' | |
// Operator suffixes accepted on Number and Array filter args (whole-value).
const numberArrayOperations = new Set(['lt', 'lte', 'gt', 'gte'])
// $elemMatch ("em") variants: compare individual array elements.
const numberArrayEmOperations = new Set(['emlt', 'emlte', 'emgt', 'emgte'])
// Regex-building operators for String fields — mutually exclusive per field.
const stringOps = new Set(['contains', 'startsWith', 'endsWith', 'regex'])
// Regex-building operators for String-array fields — mutually exclusive per field.
const stringArrayOps = new Set(['textContains', 'startsWith', 'endsWith', 'regex'])
// Guarantees an object at hash[fieldName] (replacing any falsy value), then
// invokes the callback — callers use the callback to set operator keys such
// as `$in` / `$regex` on the now-guaranteed object.
function ensure(hash, fieldName, cb = () => {}) {
  hash[fieldName] = hash[fieldName] || {}
  cb()
}
// Like `ensure`, but guarantees an array at hash[fieldName]; used when
// accumulating $in values for array-typed fields.
function ensureArr(hash, fieldName, cb = () => {}) {
  hash[fieldName] = hash[fieldName] || []
  cb()
}
// Schema paths stripped from the filterable fields below, so clients can
// never build a query against them (e.g. password hashes).
const protectedFields = ['password']
/* exported for recursion and for tests */
/**
 * Builds the aggregation $match document from the mongoose model and the
 * incoming GraphQL filter args.
 *
 * `hash` is mutated in place (recursion for nested schemas relies on this);
 * `prefix` carries the dotted path for nested fields.
 *
 * Supported arg shapes:
 *   - OR: [subFilter, ...]  -> $or of recursively-built filters
 *   - <field>: value        -> equality (Date/ObjectId casting, $elemMatch
 *                              for array fields, recursion for subdocuments)
 *   - <field>_<op>: value   -> operator query: in, nin, ne, count,
 *                              countLt/Lte/Gt/Gte, lt/lte/gt/gte,
 *                              emlt/emlte/emgt/emgte, contains, containsAny,
 *                              textContains, startsWith, endsWith, regex
 *
 * @returns {Object} the populated `hash` ($match document).
 */
export const fillMongoFiltersObject = (args, model, hash = {}, prefix = '') => {
  const currentArgs = { ...args }
  if (R.isEmpty(currentArgs)) {
    return hash
  }
  // Schema paths minus fields clients must never be able to filter on.
  const fields = R.compose(
    R.omit(protectedFields),
    R.pathOr({}, ['schema', 'paths'])
  )(model)
  Object.keys(currentArgs).forEach((k) => {
    if (k === 'OR' && currentArgs.OR != null) {
      if (!Array.isArray(currentArgs.OR)) {
        // BUG FIX: previously interpolated `hash.OR` (always undefined);
        // report the value that was actually received.
        throw new Error(`Non array passed to OR - received ${currentArgs.OR}`)
      }
      hash.$or = currentArgs.OR.map((packetArgs) =>
        fillMongoFiltersObject(packetArgs, model, undefined, prefix)
      )
    } else if (fields[k]) {
      if (fields[k].instance === 'Date') {
        // Cast to a real Date; a null filter falls through and is written
        // below. (BUG FIX: the old null branch also wrote a stray
        // un-prefixed `hash[k]` when recursing with a prefix.)
        if (currentArgs[k] !== null) {
          currentArgs[k] = new Date(currentArgs[k])
        }
      } else if (fields[k].instance === 'Array') {
        if (currentArgs[k] === null) {
          hash[prefix + k] = null
        } else {
          // Only the first key of the sub-filter decides the match style.
          const fullText = Object.keys(currentArgs[k])[0]
          if (fullText && fullText.endsWith('_noneEq')) {
            // "no array element equals": $not + $elemMatch. Generalized from
            // the old hard-coded `'courseId_noneEq'` check to any
            // `<field>_noneEq` key.
            const rightText = R.split('_')(fullText)[0]
            const elMatch = fillMongoFiltersObject(
              { [rightText]: currentArgs[k][fullText] },
              fields[k]
            )
            hash[prefix + k] = {
              $not: {
                $elemMatch: elMatch
              }
            }
          } else {
            const elMatch = fillMongoFiltersObject(currentArgs[k], fields[k])
            hash[prefix + k] = {
              $elemMatch: elMatch
            }
          }
        }
        return
      } else if (fields[k].instance === 'ObjectID') {
        // Cast to ObjectId; null falls through and is written below.
        if (currentArgs[k] !== null) {
          currentArgs[k] = mongoose.Types.ObjectId(currentArgs[k])
        }
      } else if (fields[k].schema) {
        // Single nested subdocument: recurse with a dotted prefix, writing
        // into the same `hash`.
        if (currentArgs[k] !== null) {
          fillMongoFiltersObject(
            currentArgs[k],
            fields[k],
            hash,
            `${prefix}${k}.`
          )
          return
        }
      }
      hash[`${prefix}${k}`] = currentArgs[k]
    } else if (k.indexOf('_') >= 0) {
      // Operator query, e.g. `term_lt`; the final `_` segment is the op.
      const pieces = R.split('_')(k)
      const queryOperation = R.last(pieces)
      let fieldName = pieces.slice(0, pieces.length - 1).join('_')
      if (fieldName === 'id') {
        fieldName = '_' + fieldName
      }
      // ROBUSTNESS: default to {} so an operator on an unknown field name
      // falls through the instance checks below instead of throwing on
      // `field.instance`.
      const field = fields[fieldName] || {}
      fieldName = prefix + fieldName
      if (R.isNil(currentArgs[k]) && queryOperation !== 'ne') {
        return
      }
      if (queryOperation === 'count') {
        // BUG FIX: was `hash = R.assocPath(...)(hash)`, which rebinds the
        // local binding only and silently drops the condition when this
        // function is called recursively for nested schemas. Mutate in
        // place like every other branch.
        ensure(hash, fieldName, () => {
          hash[fieldName].$size = currentArgs[k]
        })
        /* The LT/GT/LTE/GTE count queries can only be done on arrays on the
           main document: first level */
      } else if (queryOperation === 'countLt') {
        ensure(hash, '$expr', () => {
          hash.$expr.$lt = [{ $size: `$${fieldName}` }, currentArgs[k]]
        })
      } else if (queryOperation === 'countLte') {
        ensure(hash, '$expr', () => {
          hash.$expr.$lte = [{ $size: `$${fieldName}` }, currentArgs[k]]
        })
      } else if (queryOperation === 'countGt') {
        ensure(hash, '$expr', () => {
          hash.$expr.$gt = [{ $size: `$${fieldName}` }, currentArgs[k]]
        })
      } else if (queryOperation === 'countGte') {
        ensure(hash, '$expr', () => {
          hash.$expr.$gte = [{ $size: `$${fieldName}` }, currentArgs[k]]
        })
      } else if (queryOperation === 'in') {
        if (field.instance === 'Array') {
          if (field.caster.instance === 'Date') {
            // array-of-arrays field: cast every inner value to Date
            ensure(hash, fieldName, () => {
              hash[fieldName].$in = currentArgs[k].map((arr) =>
                arr.map((v) => (R.isNil(v) ? null : new Date(v)))
              )
            })
          } else if (field.caster.instance === 'ObjectID') {
            ensure(hash, fieldName, () => {
              hash[fieldName].$in = currentArgs[k].map((arr) =>
                arr.map((v) => (R.isNil(v) ? null : mongoose.Types.ObjectId(v)))
              )
            })
          }
        } else if (field.instance === 'Date') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$in = R.map((v) =>
              R.isNil(v) ? null : new Date(v)
            )(currentArgs[k])
          })
        } else if (field.instance === 'ObjectID') {
          // cast every candidate to an ObjectId for $in
          ensure(hash, fieldName, () => {
            hash[fieldName].$in = R.map((v) =>
              R.isNil(v) ? null : mongoose.Types.ObjectId(v)
            )(currentArgs[k])
          })
        } else {
          // CONSISTENCY: was `args[k]`; use the (identical) currentArgs[k]
          // like every other branch.
          ensure(hash, fieldName, () => {
            hash[fieldName].$in = currentArgs[k]
          })
        }
      } else if (queryOperation === 'nin') {
        if (field.instance === 'Array') {
          if (field.caster.instance === 'Date') {
            // array-of-arrays field: cast every inner value to Date
            ensure(hash, fieldName, () => {
              hash[fieldName].$nin = currentArgs[k].map((arr) =>
                arr.map((v) => (R.isNil(v) ? null : new Date(v)))
              )
            })
          } else if (field.caster.instance === 'ObjectID') {
            ensure(hash, fieldName, () => {
              hash[fieldName].$nin = currentArgs[k].map((arr) =>
                arr.map((v) => (R.isNil(v) ? null : mongoose.Types.ObjectId(v)))
              )
            })
          }
        } else if (field.instance === 'Date') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$nin = R.map((v) =>
              R.isNil(v) ? null : new Date(v)
            )(currentArgs[k])
          })
        } else if (field.instance === 'ObjectID') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$nin = R.map((v) =>
              R.isNil(v) ? null : mongoose.Types.ObjectId(v)
            )(currentArgs[k])
          })
        } else {
          ensure(hash, fieldName, () => {
            hash[fieldName].$nin = currentArgs[k]
          })
        }
      } else if (queryOperation === 'ne') {
        ensure(hash, fieldName, () => {
          hash[fieldName].$ne = currentArgs[k]
        })
      }
      // NOTE: execution deliberately continues into the instance checks
      // below (the original `else` was commented out); regex and numeric /
      // date comparison operators are handled here.
      if (field.instance === 'String') {
        if (
          stringOps.has(queryOperation) &&
          R.pathOr(false, [fieldName, '$regex'])(hash)
        ) {
          throw new Error(
            'Only one of startsWith, endsWith, contains, and regex can be specified for a given string field. Combine all of these filters into a single regex'
          )
        }
        if (queryOperation === 'contains') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(
              escapeStringRegexp(currentArgs[k]),
              'i'
            )
          })
        } else if (queryOperation === 'startsWith') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(
              `^${escapeStringRegexp(currentArgs[k])}`,
              'i'
            )
          })
        } else if (queryOperation === 'endsWith') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(
              `${escapeStringRegexp(currentArgs[k])}$`,
              'i'
            )
          })
        } else if (queryOperation === 'regex') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(currentArgs[k], 'i')
          })
        }
      } else if (field.instance === 'Array') {
        if (
          stringArrayOps.has(queryOperation) &&
          R.pathOr(false, [fieldName, '$regex'])(hash)
        ) {
          throw new Error(
            'Only one of startsWith, endsWith, textContains, and regex can be specified for a given string field. Combine all of these filters into a single regex'
          )
        }
        if (queryOperation === 'contains' || queryOperation === 'containsAny') {
          ensure(hash, fieldName)
          ensureArr(hash[fieldName], '$in')
          if (queryOperation === 'contains') {
            // single value, cast per the array's element type
            if (field.caster.instance === 'Date') {
              hash[fieldName].$in.push(new Date(currentArgs[k]))
            } else if (field.caster.instance === 'ObjectID') {
              hash[fieldName].$in.push(mongoose.Types.ObjectId(currentArgs[k]))
            } else {
              hash[fieldName].$in.push(currentArgs[k])
            }
          } else {
            // containsAny: list of values, each cast per the element type
            hash[fieldName].$in.push(
              ...R.map((item) => {
                if (field.caster.instance === 'Date') {
                  return new Date(item)
                }
                if (field.caster.instance === 'ObjectID') {
                  return mongoose.Types.ObjectId(item)
                }
                return item
              })(currentArgs[k])
            )
          }
        } else if (queryOperation === 'textContains') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(
              escapeStringRegexp(currentArgs[k]),
              'i'
            )
          })
        } else if (queryOperation === 'startsWith') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(
              `^${escapeStringRegexp(currentArgs[k])}`,
              'i'
            )
          })
        } else if (queryOperation === 'endsWith') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(
              `${escapeStringRegexp(currentArgs[k])}$`,
              'i'
            )
          })
        } else if (queryOperation === 'regex') {
          ensure(hash, fieldName, () => {
            hash[fieldName].$regex = new RegExp(currentArgs[k], 'i')
          })
        } else if (numberArrayOperations.has(queryOperation)) {
          // lt/lte/gt/gte on the whole array value: op name maps 1:1 to the
          // mongo operator.
          ensure(hash, fieldName, () => {
            hash[fieldName][`$${queryOperation}`] = currentArgs[k]
          })
        } else if (numberArrayEmOperations.has(queryOperation)) {
          // element-level comparison: emlt -> $elemMatch.$lt, etc.
          ensure(hash, fieldName)
          ensure(hash[fieldName], '$elemMatch')
          hash[fieldName].$elemMatch[`$${queryOperation.slice(2)}`] =
            currentArgs[k]
        }
      } else if (field.instance === 'Number') {
        if (numberArrayOperations.has(queryOperation)) {
          ensure(hash, fieldName, () => {
            hash[fieldName][`$${queryOperation}`] = currentArgs[k]
          })
        }
      } else if (field.instance === 'Date') {
        if (numberArrayOperations.has(queryOperation)) {
          // cast the comparison value to a real Date
          ensure(hash, fieldName, () => {
            hash[fieldName][`$${queryOperation}`] = new Date(currentArgs[k])
          })
        }
      }
    }
  })
  return hash
}
| /* eslint-enable no-param-reassign */ | |
// Entry point used by resolvers: builds the $match filter document for the
// given GraphQL args against a model's schema.
export const getMongoFilters = (args, model) => fillMongoFiltersObject(args, model)
/**
 * Flattens a (possibly nested) SORT argument into a flat mongo $sort
 * document, e.g. { author: { name: 1 } } -> { 'author.name': 1 }.
 * Only the first key of each level is considered (SORT args carry one field).
 *
 * @param {Object} sort - nested sort spec; leaf values are 1 / -1.
 * @param {Object} [hash={}] - accumulator spread into the result.
 * @param {string} [prefix=''] - dotted path built up during recursion.
 * @returns {Object} flat { 'dotted.path': direction } document.
 */
function parseSort(sort, hash = {}, prefix = '') {
  const key = Object.keys(sort)[0]
  const value = Object.values(sort)[0]
  // BUG FIX: `typeof null === 'object'`, so the old check recursed into a
  // null leaf and crashed on Object.values(null); treat null as a leaf.
  if (typeof value === 'object' && value !== null) {
    return parseSort(value, hash, `${prefix}${key}.`)
  }
  return {
    ...hash,
    [`${prefix}${key}`]: value
  }
}
/**
 * Decomposes a GraphQL query into the pieces of a mongo aggregation —
 * $match (filters), $sort, $skip/$limit (paging) and $project (projection) —
 * plus the assembled aggregationPipeline in the required stage order.
 *
 * @param {Object} args - GraphQL args (filter fields, SORT/SORTS, paging keys).
 * @param {Object} ast - GraphQL resolve info.
 * @param {Object} model - mongoose model.
 * @param {string} queryName - dotted path of the query field in the AST.
 * @param {Object} [options={}] - { force: [fieldNames...] } extra projection fields.
 * @returns {Object} { $match, $sort, $skip, $limit, $project,
 *   aggregationPipeline, metadataRequested, extrasPackets }.
 */
export const deconstructGraphqlQLQuery = (
  args,
  ast,
  model,
  queryName,
  options = {}
) => {
  // Deep-clone args so the Date/ObjectId casting done by the filter builder
  // can never leak back into the resolver's arguments.
  const safeArgs = { ...JSON.parse(JSON.stringify(args)) }
  const $match = getMongoFilters(safeArgs, model)
  const requestMap = parseRequestedFields(ast, queryName, options.force || [])
  const metadataRequested = parseRequestedFields(ast, 'Meta')
  const { $project, extrasPackets } = parseRequestedHierarchy(
    ast,
    requestMap,
    model,
    safeArgs
  )
  const sort = safeArgs.SORT
  const sorts = safeArgs.SORTS
  let $sort = null
  let $skip = null
  let $limit = null
  const aggregationPipeline = [{ $match }]
  if (sort) {
    $sort = parseSort(sort)
    aggregationPipeline.push({ $sort })
  } else if (sorts) {
    // SORTS: [{ field: dir }, ...] merged left-to-right.
    $sort = {}
    sorts.forEach((packet) => {
      Object.assign($sort, packet)
    })
    aggregationPipeline.push({ $sort })
  }
  // Explicit LIMIT/SKIP win over PAGE/PAGE_SIZE paging.
  if (args.LIMIT != null || args.SKIP != null) {
    $skip = args.SKIP
    $limit = args.LIMIT
  } else if (args.PAGE != null && args.PAGE_SIZE != null) {
    $skip = (args.PAGE - 1) * args.PAGE_SIZE
    $limit = args.PAGE_SIZE
  }
  // Truthiness is deliberate here: $skip 0 is a no-op and $limit 0 would be
  // a mongo error, so both are simply omitted from the pipeline.
  if ($skip) {
    aggregationPipeline.push({ $skip })
  }
  if ($limit) {
    aggregationPipeline.push({ $limit })
  }
  // $project must be the last stage so earlier stages still see all fields.
  if ($project) {
    aggregationPipeline.push({ $project })
  }
  return {
    $match,
    $sort,
    $skip,
    $limit,
    $project,
    aggregationPipeline,
    metadataRequested,
    extrasPackets
  }
}
| // function addRelationshipLookups( | |
| // aggregationPipeline, | |
| // ast, | |
| // rootQuery, | |
| // model, | |
| // $project | |
| // ) { | |
| // let { ast: currentAst } = getNestedQueryInfo(ast, rootQuery) | |
| // if (!currentAst) { | |
| // return | |
| // } | |
| // let originalAst = ast | |
| // console.log(currentAst, originalAst) | |
| // let addedFields = new Set([]); | |
| // Object.keys(TypeMetadata.relationships).forEach((relationshipName) => { | |
| // let relationship = TypeMetadata.relationships[relationshipName]; | |
| // let foreignKeyType = TypeMetadata.fields[relationship.fkField]; | |
| // let fkField = relationship.fkField; | |
| // let keyField = relationship.keyField; | |
| // let keyType = relationship.type.fields[relationship.keyField]; | |
| // let keyTypeIsArray = /Array/g.test(keyType); | |
| // let foreignKeyIsArray = | |
| // foreignKeyType == StringArrayType || foreignKeyType == MongoIdArrayType; | |
| // let destinationKeyType = relationship.type.fields[relationship.keyField]; | |
| // let receivingKeyIsArray = /Array$/.test(destinationKeyType); | |
| // let relationshipsToLoop = [{ relationshipName, meta: false }]; | |
| // if (relationship.__isArray) { | |
| // relationshipsToLoop.push({ | |
| // relationshipName: relationshipName + 'Meta', | |
| // meta: true, | |
| // }); | |
| // } | |
| // for (let { relationshipName, meta } of relationshipsToLoop) { | |
| // let ast = getRelationshipAst( | |
| // currentAst, | |
| // relationshipName, | |
| // relationship.type | |
| // ); | |
| // if (!ast) continue; | |
| // let relationshipArgs = parseGraphqlArguments(ast.arguments); | |
| // Object.assign(relationshipArgs, relationshipArgs.FILTER || {}); | |
| // delete relationshipArgs.FILTER; | |
| // let { | |
| // aggregationPipeline: pipelineValues, | |
| // $match, | |
| // } = decontructGraphqlQuery( | |
| // relationshipArgs, | |
| // currentAst, | |
| // relationship.type, | |
| // relationshipName | |
| // ); | |
| // let canUseSideQuery = | |
| // !meta && | |
| // !pipelineValues.find( | |
| // (entry) => entry.$skip != null || entry.$limit != null | |
| // ); | |
| // if (canUseSideQuery) { | |
| // if ( | |
| // (!settings.getPreferLookup() && !relationshipArgs.PREFER_LOOKUP) || | |
| // relationshipArgs.DONT_PREFER_LOOKUP | |
| // ) { | |
| // continue; | |
| // } | |
| // } | |
| // let fkNameToUse = fkField.replace(/^_/, 'x_'); | |
| // let asString = false; | |
| // let asObjectId = false; | |
| // let asObjectIdArray = false; | |
| // let asStringIdArray = false; | |
| // if ( | |
| // !foreignKeyIsArray && | |
| // foreignKeyType != StringType && | |
| // (keyType == StringType || keyType == StringArrayType) | |
| // ) { | |
| // fkNameToUse += '___as___string'; | |
| // asString = true; | |
| // } else if ( | |
| // !foreignKeyIsArray && | |
| // foreignKeyType != MongoIdType && | |
| // (keyType == MongoIdType || keyType == MongoIdArrayType) | |
| // ) { | |
| // fkNameToUse += '___as___objectId'; | |
| // asObjectId = true; | |
| // } else if ( | |
| // foreignKeyIsArray && | |
| // foreignKeyType != MongoIdArrayType && | |
| // (keyType == MongoIdType || keyType == MongoIdArrayType) | |
| // ) { | |
| // asObjectIdArray = true; | |
| // fkNameToUse += '__as__objectIdArray'; | |
| // } else if ( | |
| // foreignKeyIsArray && | |
| // foreignKeyType != StringArrayType && | |
| // (keyType == StringType || keyType == StringArrayType) | |
| // ) { | |
| // asStringIdArray = true; | |
| // fkNameToUse += '__as__stringIdArray'; | |
| // } | |
| // if (!addedFields.has(fkNameToUse)) { | |
| // addedFields.add(fkNameToUse); | |
| // if (asString) { | |
| // aggregationPipeline.push({ | |
| // $addFields: { [fkNameToUse]: { $toString: '$' + fkField } }, | |
| // }); | |
| // } else if (asObjectId) { | |
| // aggregationPipeline.push({ | |
| // $addFields: { [fkNameToUse]: { $toObjectId: '$' + fkField } }, | |
| // }); | |
| // } else if (asObjectIdArray) { | |
| // aggregationPipeline.push({ | |
| // $addFields: { | |
| // [fkNameToUse]: { | |
| // $map: { | |
| // input: '$' + fkField, | |
| // as: 'val', | |
| // in: { $toObjectId: ['$$val'] }, | |
| // }, | |
| // }, | |
| // }, | |
| // }); | |
| // } else if (asStringIdArray) { | |
| // aggregationPipeline.push({ | |
| // $addFields: { | |
| // [fkNameToUse]: { | |
| // $map: { | |
| // input: '$' + fkField, | |
| // as: 'val', | |
| // in: { $toString: ['$$val'] }, | |
| // }, | |
| // }, | |
| // }, | |
| // }); | |
| // } else { | |
| // aggregationPipeline.push({ | |
| // $addFields: { [fkNameToUse]: '$' + fkField }, | |
| // }); | |
| // } | |
| // } | |
| // const keyAsArray = { | |
| // $cond: { | |
| // if: { $isArray: '$' + keyField }, | |
| // then: '$' + keyField, | |
| // else: [], | |
| // }, | |
| // }; | |
| // const foreignKeyAsArray = { | |
| // $cond: { if: { $isArray: '$$fkField' }, then: '$$fkField', else: [] }, | |
| // }; | |
| // if (foreignKeyIsArray) { | |
| // if (keyTypeIsArray) { | |
| // Object.assign($match, { | |
| // $expr: { | |
| // $ne: [[], { $setIntersection: [foreignKeyAsArray, keyAsArray] }], | |
| // }, | |
| // }); | |
| // } else { | |
| // Object.assign($match, { | |
| // $expr: { $in: ['$' + keyField, foreignKeyAsArray] }, | |
| // }); | |
| // } | |
| // } else if (keyTypeIsArray) { | |
| // Object.assign($match, { $expr: { $in: ['$$fkField', keyAsArray] } }); | |
| // } else { | |
| // Object.assign($match, { | |
| // $expr: { $eq: ['$$fkField', '$' + keyField] }, | |
| // }); | |
| // } | |
| // $project = $project || {}; | |
| // aggregationPipeline.push({ | |
| // $lookup: { | |
| // from: relationship.type.table, | |
| // let: { fkField: '$' + fkNameToUse }, | |
| // pipeline: pipelineValues, | |
| // as: (meta ? '__' : '') + relationshipName, | |
| // }, | |
| // }); | |
| // if (meta) { | |
| // let pipelineProject = pipelineValues.find((val) => val.$project); | |
| // pipelineProject.$project = { _id: 1 }; | |
| // $project[relationshipName] = { | |
| // count: { $size: `$__${relationshipName}` }, | |
| // }; | |
| // } else { | |
| // if (relationship.__isObject) { | |
| // pipelineValues.push({ $limit: 1 }); | |
| // aggregationPipeline.push({ | |
| // $unwind: { | |
| // path: '$' + relationshipName, | |
| // preserveNullAndEmptyArrays: true, | |
| // }, | |
| // }); | |
| // $project[relationshipName] = { | |
| // $ifNull: ['$' + relationshipName, null], | |
| // }; | |
| // } else { | |
| // $project[relationshipName] = '$' + relationshipName; | |
| // } | |
| // } | |
| // } | |
| // }); | |
| // } |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import mongoose from 'mongoose' | |
| import R from 'ramda' | |
| import { fillMongoFiltersObject } from './query' | |
| import { newObjectFromArgs } from './insertHelpers' | |
/**
 * Builds a MongoDB update document ({ $set, $inc, $push, $pull, $addToSet })
 * from a GraphQL-style updates object, walking the mongoose model schema.
 * Operator buckets that collect nothing are stripped from the result.
 *
 * @param {object} updatesObject - args keyed by field name, optionally
 *   suffixed with an operation (e.g. `count_INC`, `tags_PUSH`).
 * @param {object} typeMetadata - mongoose model whose `schema.paths` are read.
 * @param {object} [relationshipLoadingUtils] - options threaded to recursion.
 * @returns {Promise<object>} update document with only non-empty operators.
 */
export async function getUpdateObject(
  updatesObject,
  typeMetadata,
  relationshipLoadingUtils = {}
) {
  // Accumulators mutated in place by getUpdateObjectContents.
  const buckets = {
    $set: {},
    $inc: {},
    $push: {},
    $pull: {},
    $addToSet: {}
  }
  await getUpdateObjectContents(
    updatesObject,
    typeMetadata,
    '',
    buckets.$set,
    buckets.$inc,
    buckets.$push,
    buckets.$pull,
    buckets.$addToSet,
    relationshipLoadingUtils
  )
  // Drop operators that stayed empty so the final update is minimal.
  for (const op of Object.keys(buckets)) {
    if (Object.keys(buckets[op]).length === 0) {
      delete buckets[op]
    }
  }
  return buckets
}
/**
 * Recursively walks `updatesObject` against the mongoose `model` schema and
 * fills the supplied mutation buckets ($set/$inc/$push/$pull/$addToSet),
 * all of which are mutated in place.
 *
 * Keys that match a schema path are coerced (Date / ObjectId / subdocument
 * array) and staged under $set. Keys with no schema match carry an operation
 * suffix (FIELD_INC, FIELD_DEC, FIELD_PUSH, FIELD_CONCAT, FIELD_UPDATE,
 * FIELD_UPDATES, FIELD_PULL, FIELD_ADDTOSET) and are routed to the matching
 * operator bucket.
 *
 * @param {object} updatesObject - incoming update args.
 * @param {object} model - mongoose model (or subdocument field) whose
 *   `schema.paths` describe the valid fields.
 * @param {string} prefix - dotted path prefix used for nested/array updates.
 * @param {object} $set - staged direct assignments.
 * @param {object} $inc - staged increments/decrements.
 * @param {object} $push - staged array appends ($each form).
 * @param {object} $pull - staged array removals.
 * @param {object} $addToSet - staged set additions ($each form).
 * @param {object} options - passed through to newObjectFromArgs.
 */
function getUpdateObjectContents(
  updatesObject,
  model,
  prefix,
  $set,
  $inc,
  $push,
  $pull,
  $addToSet,
  options
) {
  const fields = R.compose(
    R.omit([]),
    R.pathOr({}, ['schema', 'paths'])
  )(model)
  R.forEach((k) => {
    let field = fields[k]
    if (field) {
      // Direct schema match: coerce the value and stage it under $set.
      if (field.instance === 'Date') {
        $set[prefix + k] = new Date(updatesObject[k])
      } else if (field.instance === 'Array') {
        if (field.caster.$isArraySubdocument) {
          $set[prefix + k] = R.map((o) => newObjectFromArgs(o, field, {}))(
            updatesObject[k]
          )
        } else if (field.caster.instance === 'ObjectId') {
          $set[prefix + k] = R.map((item) => mongoose.Types.ObjectId(item))(
            updatesObject[k]
          )
        } else if (field.caster.instance === 'Date') {
          // FIX: the old mapper ignored its argument and R.map was never
          // applied to the array (R.map is curried), so a function — not an
          // array of Dates — was stored in $set.
          $set[prefix + k] = R.map((item) => new Date(item))(updatesObject[k])
        } else {
          $set[prefix + k] = updatesObject[k]
        }
      } else if (field.instance === 'ObjectId') {
        $set[prefix + k] = mongoose.Types.ObjectId(updatesObject[k])
      } else {
        $set[prefix + k] = updatesObject[k]
      }
    } else {
      // No matching field on the mongoose schema means the key carries an
      // operation suffix, e.g. `count_INC` or `tags_PUSH`.
      const pieces = R.split('_')(k)
      const queryOperation = R.last(pieces)
      const fieldName = R.compose(
        R.join('_'),
        R.init
      )(pieces)
      field = fields[fieldName]
      if (queryOperation === 'INC') {
        $inc[prefix + fieldName] = updatesObject[k]
      } else if (queryOperation === 'DEC') {
        $inc[prefix + fieldName] = updatesObject[k] * -1
      } else if (queryOperation === 'PUSH') {
        // Single-value append; $each keeps the shape merge-compatible with
        // CONCAT below.
        if (field.caster.$isArraySubdocument) {
          const toAdd = newObjectFromArgs(updatesObject[k], field, options)
          $push[prefix + fieldName] = { $each: [toAdd] }
        } else if (field.caster.instance === 'Date') {
          $push[prefix + fieldName] = { $each: [new Date(updatesObject[k])] }
        } else if (field.caster.instance === 'ObjectId') {
          $push[prefix + fieldName] = {
            $each: [mongoose.Types.ObjectId(updatesObject[k])]
          }
        } else {
          $push[prefix + fieldName] = { $each: [updatesObject[k]] }
        }
      } else if (queryOperation === 'CONCAT') {
        // Multi-value append; accumulates into any $each already staged by
        // a PUSH on the same field.
        if (!$push[prefix + fieldName]) {
          $push[prefix + fieldName] = { $each: [] }
        }
        if (field.caster.$isArraySubdocument) {
          const toAdd = R.map((item) =>
            newObjectFromArgs(item, field, options)
          )(updatesObject[k])
          $push[prefix + fieldName].$each.push(...toAdd)
        } else if (field.caster.instance === 'Date') {
          $push[prefix + fieldName].$each.push(
            ...R.map((d) => new Date(d))(updatesObject[k])
          )
        } else if (field.caster.instance === 'ObjectId') {
          $push[prefix + fieldName].$each.push(
            ...R.map((id) => mongoose.Types.ObjectId(id))(updatesObject[k])
          )
        } else {
          $push[prefix + fieldName].$each.push(...updatesObject[k])
        }
      } else if (queryOperation === 'UPDATE') {
        // Positional update of one array element ({ index, ... }) or, for a
        // plain nested object, a recursive walk with an extended prefix.
        if (field.caster.$isArraySubdocument) {
          getUpdateObjectContents(
            updatesObject[k].Updates,
            field,
            prefix + `${fieldName}.${updatesObject[k].index}.`,
            $set,
            $inc,
            $push,
            $pull,
            $addToSet,
            options
          )
        } else if (field.caster.instance === 'Date') {
          $set[prefix + `${fieldName}.${updatesObject[k].index}`] = new Date(
            updatesObject[k].value
          )
        } else if (field.caster.instance === 'ObjectId') {
          $set[
            prefix + `${fieldName}.${updatesObject[k].index}`
          ] = mongoose.Types.ObjectId(updatesObject[k].value)
        } else {
          getUpdateObjectContents(
            updatesObject[k],
            field.type,
            prefix + `${fieldName}.`,
            $set,
            $inc,
            $push,
            $pull,
            $addToSet,
            options
          )
        }
      } else if (queryOperation === 'UPDATES') {
        // Batch form of UPDATE: an array of { index, value | Updates }.
        if (field.caster.instance === 'Date') {
          R.forEach((update) => {
            $set[prefix + `${fieldName}.${update.index}`] = new Date(
              update.value
            )
          })(updatesObject[k])
        } else if (field.caster.instance === 'ObjectId') {
          R.forEach((update) => {
            // FIX: was mongoose.Types.Object (typo) — threw a TypeError.
            $set[
              prefix + `${fieldName}.${update.index}`
            ] = mongoose.Types.ObjectId(update.value)
          })(updatesObject[k])
        } else {
          for (const update of updatesObject[k]) {
            getUpdateObjectContents(
              update.Updates,
              field,
              prefix + `${fieldName}.${update.index}.`,
              $set,
              $inc,
              $push,
              $pull,
              $addToSet,
              options
            )
          }
        }
      } else if (queryOperation === 'PULL') {
        // $pull takes a query condition, so $in is correct here.
        if (field.caster.instance === 'Date') {
          $pull[prefix + fieldName] = {
            $in: R.map((val) => new Date(val))(updatesObject[k])
          }
        } else if (field.caster.instance === 'ObjectId') {
          $pull[prefix + fieldName] = {
            $in: R.map((val) => mongoose.Types.ObjectId(val))(updatesObject[k])
          }
        } else {
          $pull[prefix + fieldName] = fillMongoFiltersObject(
            updatesObject[k],
            field
          )
        }
      } else if (queryOperation === 'ADDTOSET') {
        // FIX: $addToSet adds multiple values via the $each modifier; the old
        // { $in: [...] } form would have inserted that literal object into
        // the array instead of the values.
        if (field.caster.instance === 'Date') {
          $addToSet[prefix + fieldName] = {
            $each: R.map((val) => new Date(val))(updatesObject[k])
          }
        } else if (field.caster.instance === 'ObjectId') {
          $addToSet[prefix + fieldName] = {
            $each: R.map((val) => mongoose.Types.ObjectId(val))(updatesObject[k])
          }
        } else {
          // FIX: plain values were previously dropped silently.
          $addToSet[prefix + fieldName] = { $each: updatesObject[k] }
        }
      }
    }
  })(Object.keys(updatesObject))
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment