|
| 1 | +'use strict'; |
| 2 | + |
| 3 | +const parser = require('graphql/language/parser'), |
| 4 | + parse = parser.parse; |
| 5 | + |
// LRU cache mapping normalized GraphQL document strings -> parsed AST documents,
// so repeated gql`` calls with the same source are parsed only once.
const LRU = require('lru-cache'),
  options = { max: 1000, maxAge: 1000 * 60 * 60 * 24 }, // max 1000 entries, default TTL 1 day
  docCache = LRU(options);
| 10 | + |
// Produce a canonical form of a GraphQL source string by collapsing every
// run of whitespace and commas (both insignificant in GraphQL) into a single
// space and trimming the ends. Used as the cache key.
// Note that this could do a lot more, such as reorder fields etc.
function normalize(string) {
  const collapsed = string.replace(/[\s,]+/g, ' ');
  return collapsed.trim();
}
| 16 | + |
// Registry of every fragment name seen so far:
// fragmentName -> { normalizedSourceKey: true, ... }.
// Used to warn when two different fragment sources share one name.
let fragmentSourceMap = {};
| 19 | + |
// Derive a stable identity key for an AST node from its `loc`: the
// normalized slice of original source text the node was parsed from.
function cacheKeyFromLoc(loc) {
  const rawSource = loc.source.body.substring(loc.start, loc.end);
  return normalize(rawSource);
}
| 23 | + |
// Override the LRU cache configuration [https://github.com/isaacs/node-lru-cache].
// Only `max` (entry count) and `maxAge` (milliseconds) are supported;
// missing or non-numeric values are silently ignored.
function setCacheOptions(options) {
  if (!options) {
    return;
  }
  const { max, maxAge } = options;
  if (max && typeof max === 'number') {
    docCache.max = max;
  }
  if (maxAge && typeof maxAge === 'number') {
    docCache.maxAge = maxAge;
  }
}
| 37 | + |
// Report how many parsed documents the LRU cache currently holds.
function getCachedItemsCount() {
  const { itemCount } = docCache;
  return itemCount;
}
| 42 | + |
// For testing: forget all registered fragment names and empty the
// parsed-document cache.
function resetCaches() {
  fragmentSourceMap = {};
  docCache.reset();
}
| 48 | + |
// Walk every definition of a freshly parsed document (query/mutation or even
// a bare fragment) and:
//  1. enforce application-wide name->source uniqueness for fragments,
//     warning when a previously registered name reappears with different
//     source text, and
//  2. drop duplicate fragment definitions so each unique fragment source
//     appears at most once in the returned document.
function processFragments(ast) {
  const seenFragmentKeys = {};
  const keptDefinitions = [];

  for (const definition of ast.definitions) {
    if (definition.kind !== 'FragmentDefinition') {
      keptDefinitions.push(definition);
      continue;
    }

    const fragmentName = definition.name.value;
    const sourceKey = cacheKeyFromLoc(definition.loc);

    // Sources already registered under this fragment name, if any.
    const knownSources = fragmentSourceMap.hasOwnProperty(fragmentName)
      ? fragmentSourceMap[fragmentName]
      : null;

    if (knownSources && !knownSources[sourceKey]) {
      // Same name, different source: the app developer is trying to register
      // another fragment with the name of one previously registered. Tell them.
      console.warn('Warning: fragment with name ' + fragmentName + ' already exists.\n'
        + 'graphql-tag enforces all fragment names across your application to be unique; read more about\n'
        + 'this in the docs: http://dev.apollodata.com/core/fragments.html#unique-names');

      knownSources[sourceKey] = true;
    } else if (!knownSources) {
      fragmentSourceMap[fragmentName] = {};
      fragmentSourceMap[fragmentName][sourceKey] = true;
    }

    // Keep only the first occurrence of each fragment source in this document.
    if (!seenFragmentKeys[sourceKey]) {
      seenFragmentKeys[sourceKey] = true;
      keptDefinitions.push(definition);
    }
  }

  ast.definitions = keptDefinitions;
  return ast;
}
| 90 | + |
// Recursively delete `loc` fields from a parsed document so cached ASTs are
// lean and comparable. The root node's `loc` is preserved (callers pass
// removeLocAtThisLevel = false) because it is needed for fragment
// substitution, but its heavyweight token references are always dropped
// (https://github.com/apollographql/graphql-tag/issues/40).
// Mutates `doc` in place and returns it; throws on any input that is not a
// plain object or array.
function stripLoc(doc, removeLocAtThisLevel) {
  const docType = Object.prototype.toString.call(doc);

  if (docType === '[object Array]') {
    return doc.map(function (d) {
      return stripLoc(d, removeLocAtThisLevel);
    });
  }

  if (docType !== '[object Object]') {
    throw new Error('Unexpected input.');
  }

  // We don't want to remove the root loc field so we can use it
  // for fragment substitution (see above).
  if (removeLocAtThisLevel && doc.loc) {
    delete doc.loc;
  }

  // https://github.com/apollographql/graphql-tag/issues/40
  if (doc.loc) {
    delete doc.loc.startToken;
    delete doc.loc.endToken;
  }

  // Recurse into every object/array-valued property. The original code
  // iterated `for (key in keys)` over the key *array* — a for...in-on-array
  // anti-pattern that only worked by accident of index enumeration; iterate
  // the key values directly instead.
  for (const key of Object.keys(doc)) {
    const value = doc[key];
    const valueType = Object.prototype.toString.call(value);

    if (valueType === '[object Object]' || valueType === '[object Array]') {
      doc[key] = stripLoc(value, true);
    }
  }

  return doc;
}
| 133 | + |
// Module-level flag threaded into graphql's parser; toggled via the
// enable/disableExperimentalFragmentVariables exports below.
let experimentalFragmentVariables = false;

// Parse a GraphQL source string into an AST document, memoized in the LRU
// cache keyed by the normalized source. Cache entries are stored after
// fragment deduplication and loc-stripping, so cache hits return the same
// processed document object.
function parseDocument(doc) {
  const cacheKey = normalize(doc);

  const cachedItem = docCache.get(cacheKey);
  if (cachedItem) {
    return cachedItem;
  }

  // Let syntax errors propagate unchanged: the previous
  // `try { ... } catch (error) { throw new Error(error); }` re-wrap
  // stringified the GraphQLError, destroying its stack and its
  // `locations`/`source`/`positions` fields that callers and tooling rely on.
  let parsed = parse(doc, { experimentalFragmentVariables });
  if (!parsed || parsed.kind !== 'Document') {
    throw new Error('Not a valid GraphQL document.');
  }

  // check that all "new" fragments inside the documents are consistent with
  // existing fragments of the same name
  parsed = processFragments(parsed);
  parsed = stripLoc(parsed, false);
  docCache.set(cacheKey, parsed);

  return parsed;
}
| 155 | + |
// Opt in to graphql's experimental fragment-variables syntax for all
// subsequent gql`` / parseDocument calls.
function enableExperimentalFragmentVariables() {
  experimentalFragmentVariables = true;
}
| 159 | + |
// Opt back out of graphql's experimental fragment-variables syntax
// (the default state).
function disableExperimentalFragmentVariables() {
  experimentalFragmentVariables = false;
}
// Template-literal tag: stitch the literal chunks and the interpolated
// values back into a single source string, then parse it (with caching).
// Also callable as a plain function with a single string argument.
// XXX This should eventually disallow arbitrary string interpolation, like Relay does
function gql(literals, ...expressions) {
  // A plain-string call has no expressions; a tagged call interleaves
  // literals[0], expressions[0], literals[1], expressions[1], ...
  let source = typeof literals === 'string' ? literals : literals[0];

  expressions.forEach(function (expression, i) {
    source += expression;
    source += literals[i + 1];
  });

  return parseDocument(source);
}
| 177 | + |
// Public API: the tag itself with helpers hung off it.
// `gql.default = gql` keeps TypeScript-style default imports working,
// since TS isn't as nice as Babel about default exports.
gql.default = gql;

Object.assign(gql, {
  resetCaches,
  getCachedItemsCount,
  setCacheOptions,
  enableExperimentalFragmentVariables,
  disableExperimentalFragmentVariables,
  parseDocument,
  stripLoc,
});

module.exports = gql;
0 commit comments