PHP WebShell
Current directory: /usr/lib/node_modules/bitgo/node_modules/@urql/core/dist
Viewing file: 16a45cc7.min.mjs.map
{"version":3,"file":"16a45cc7.min.mjs","sources":["../src/utils/error.ts","../src/utils/hash.ts","../src/utils/stringifyVariables.ts","../src/utils/request.ts","../src/utils/result.ts","../src/internal/fetchOptions.ts","../src/internal/fetchSource.ts"],"sourcesContent":["import { GraphQLError } from 'graphql';\n\nconst generateErrorMessage = (\n networkErr?: Error,\n graphQlErrs?: GraphQLError[]\n) => {\n let error = '';\n if (networkErr !== undefined) {\n return (error = `[Network] ${networkErr.message}`);\n }\n\n if (graphQlErrs !== undefined) {\n graphQlErrs.forEach(err => {\n error += `[GraphQL] ${err.message}\\n`;\n });\n }\n\n return error.trim();\n};\n\nconst rehydrateGraphQlError = (error: any): GraphQLError => {\n if (typeof error === 'string') {\n return new GraphQLError(error);\n } else if (typeof error === 'object' && error.message) {\n return new GraphQLError(\n error.message,\n error.nodes,\n error.source,\n error.positions,\n error.path,\n error,\n error.extensions || {}\n );\n } else {\n return error as any;\n }\n};\n\n/** An error which can consist of GraphQL errors and Network errors. */\nexport class CombinedError extends Error {\n public name: string;\n public message: string;\n public graphQLErrors: GraphQLError[];\n public networkError?: Error;\n public response?: any;\n\n constructor({\n networkError,\n graphQLErrors,\n response,\n }: {\n networkError?: Error;\n graphQLErrors?: Array<string | Partial<GraphQLError> | Error>;\n response?: any;\n }) {\n const normalizedGraphQLErrors = (graphQLErrors || []).map(\n rehydrateGraphQlError\n );\n const message = generateErrorMessage(networkError, normalizedGraphQLErrors);\n\n super(message);\n\n this.name = 'CombinedError';\n this.message = message;\n this.graphQLErrors = normalizedGraphQLErrors;\n this.networkError = networkError;\n this.response = response;\n }\n\n toString() {\n return this.message;\n }\n}\n","// When we have separate strings it's useful to run a progressive\n// version of djb2 where we pretend that we're still looping over\n// the same string\nexport const phash = (h: number, x: string): number => {\n h = h | 0;\n for (let i = 0, l = x.length | 0; i < l; i++) {\n h = (h << 5) + h + x.charCodeAt(i);\n }\n\n return h;\n};\n\n// This is a djb2 hashing function\nexport const hash = (x: string): number => phash(5381 | 0, x) >>> 0;\n","const seen = new Set();\nconst cache = new WeakMap();\n\nconst stringify = (x: any): string => {\n if (x === null || seen.has(x)) {\n return 'null';\n } else if (typeof x !== 'object') {\n return JSON.stringify(x) || '';\n } else if (x.toJSON) {\n return stringify(x.toJSON());\n } else if (Array.isArray(x)) {\n let out = '[';\n for (let i = 0, l = x.length; i < l; i++) {\n if (i > 0) out += ',';\n const value = stringify(x[i]);\n out += value.length > 0 ? 
value : 'null';\n }\n\n out += ']';\n return out;\n }\n\n const keys = Object.keys(x).sort();\n if (!keys.length && x.constructor && x.constructor !== Object) {\n const key = cache.get(x) || Math.random().toString(36).slice(2);\n cache.set(x, key);\n return `{\"__key\":\"${key}\"}`;\n }\n\n seen.add(x);\n let out = '{';\n for (let i = 0, l = keys.length; i < l; i++) {\n const key = keys[i];\n const value = stringify(x[key]);\n if (value) {\n if (out.length > 1) out += ',';\n out += stringify(key) + ':' + value;\n }\n }\n\n seen.delete(x);\n out += '}';\n return out;\n};\n\nexport const stringifyVariables = (x: any): string => {\n seen.clear();\n return stringify(x);\n};\n","import { TypedDocumentNode } from '@graphql-typed-document-node/core';\n\nimport {\n Location,\n DefinitionNode,\n DocumentNode,\n Kind,\n parse,\n print,\n} from 'graphql';\n\nimport { hash, phash } from './hash';\nimport { stringifyVariables } from './stringifyVariables';\nimport { GraphQLRequest } from '../types';\n\ninterface WritableLocation {\n loc: Location | undefined;\n}\n\nexport interface KeyedDocumentNode extends DocumentNode {\n __key: number;\n}\n\nexport const stringifyDocument = (\n node: string | DefinitionNode | DocumentNode\n): string => {\n let str = (typeof node !== 'string'\n ? (node.loc && node.loc.source.body) || print(node)\n : node\n )\n .replace(/([\\s,]|#[^\\n\\r]+)+/g, ' ')\n .trim();\n\n if (typeof node !== 'string') {\n const operationName = 'definitions' in node && getOperationName(node);\n if (operationName) {\n str = `# ${operationName}\\n${str}`;\n }\n\n if (!node.loc) {\n (node as WritableLocation).loc = {\n start: 0,\n end: str.length,\n source: {\n body: str,\n name: 'gql',\n locationOffset: { line: 1, column: 1 },\n },\n } as Location;\n }\n }\n\n return str;\n};\n\nconst docs = new Map<number, KeyedDocumentNode>();\n\nexport const keyDocument = (q: string | DocumentNode): KeyedDocumentNode => {\n let key: number;\n let query: DocumentNode;\n if (typeof q === 'string') {\n key = hash(stringifyDocument(q));\n query = docs.get(key) || parse(q, { noLocation: true });\n } else {\n key = (q as KeyedDocumentNode).__key || hash(stringifyDocument(q));\n query = docs.get(key) || q;\n }\n\n // Add location information if it's missing\n if (!query.loc) stringifyDocument(query);\n\n (query as KeyedDocumentNode).__key = key;\n docs.set(key, query as KeyedDocumentNode);\n return query as KeyedDocumentNode;\n};\n\nexport const createRequest = <Data = any, Variables = object>(\n q: string | DocumentNode | TypedDocumentNode<Data, Variables>,\n vars?: Variables\n): GraphQLRequest<Data, Variables> => {\n if (!vars) vars = {} as Variables;\n const query = keyDocument(q);\n return {\n key: phash(query.__key, stringifyVariables(vars)) >>> 0,\n query,\n variables: vars,\n };\n};\n\n/**\n * Finds the Name value from the OperationDefinition of a Document\n */\nexport const getOperationName = (query: DocumentNode): string | undefined => {\n for (let i = 0, l = query.definitions.length; i < l; i++) {\n const node = query.definitions[i];\n if (node.kind === Kind.OPERATION_DEFINITION && node.name) {\n return node.name.value;\n }\n }\n};\n\n/**\n * Finds the operation-type\n */\nexport const getOperationType = (query: DocumentNode): string | undefined => {\n for (let i = 0, l = query.definitions.length; i < l; i++) {\n const node = query.definitions[i];\n if (node.kind === Kind.OPERATION_DEFINITION) {\n return node.operation;\n }\n }\n};\n","import { ExecutionResult, Operation, OperationResult } from 
'../types';\nimport { CombinedError } from './error';\n\nexport const makeResult = (\n operation: Operation,\n result: ExecutionResult,\n response?: any\n): OperationResult => {\n if ((!('data' in result) && !('errors' in result)) || 'path' in result) {\n throw new Error('No Content');\n }\n\n return {\n operation,\n data: result.data,\n error: Array.isArray(result.errors)\n ? new CombinedError({\n graphQLErrors: result.errors,\n response,\n })\n : undefined,\n extensions:\n (typeof result.extensions === 'object' && result.extensions) || undefined,\n hasNext: !!result.hasNext,\n };\n};\n\nexport const mergeResultPatch = (\n prevResult: OperationResult,\n patch: ExecutionResult,\n response?: any\n): OperationResult => {\n const result = { ...prevResult };\n result.hasNext = !!patch.hasNext;\n\n if (!('path' in patch)) {\n if ('data' in patch) result.data = patch.data;\n return result;\n }\n\n if (Array.isArray(patch.errors)) {\n result.error = new CombinedError({\n graphQLErrors: result.error\n ? [...result.error.graphQLErrors, ...patch.errors]\n : patch.errors,\n response,\n });\n }\n\n let part: Record<string, any> | Array<any> = (result.data = {\n ...result.data,\n });\n\n let i = 0;\n let prop: string | number;\n while (i < patch.path.length) {\n prop = patch.path[i++];\n part = part[prop] = Array.isArray(part[prop])\n ? [...part[prop]]\n : { ...part[prop] };\n }\n\n Object.assign(part, patch.data);\n return result;\n};\n\nexport const makeErrorResult = (\n operation: Operation,\n error: Error,\n response?: any\n): OperationResult => ({\n operation,\n data: undefined,\n error: new CombinedError({\n networkError: error,\n response,\n }),\n extensions: undefined,\n});\n","import { DocumentNode, print } from 'graphql';\n\nimport { getOperationName, stringifyVariables } from '../utils';\nimport { Operation } from '../types';\n\nexport interface FetchBody {\n query?: string;\n operationName: string | undefined;\n variables: undefined | Record<string, any>;\n extensions: undefined | Record<string, any>;\n}\n\nconst shouldUseGet = (operation: Operation): boolean => {\n return operation.kind === 'query' && !!operation.context.preferGetMethod;\n};\n\nexport const makeFetchBody = (request: {\n query: DocumentNode;\n variables?: object;\n}): FetchBody => ({\n query: print(request.query),\n operationName: getOperationName(request.query),\n variables: request.variables || undefined,\n extensions: undefined,\n});\n\nexport const makeFetchURL = (\n operation: Operation,\n body?: FetchBody\n): string => {\n const useGETMethod = shouldUseGet(operation);\n const url = operation.context.url;\n if (!useGETMethod || !body) return url;\n\n const search: string[] = [];\n if (body.operationName) {\n search.push('operationName=' + encodeURIComponent(body.operationName));\n }\n\n if (body.query) {\n search.push(\n 'query=' +\n encodeURIComponent(body.query.replace(/#[^\\n\\r]+/g, ' ').trim())\n );\n }\n\n if (body.variables) {\n search.push(\n 'variables=' + encodeURIComponent(stringifyVariables(body.variables))\n );\n }\n\n if (body.extensions) {\n search.push(\n 'extensions=' + encodeURIComponent(stringifyVariables(body.extensions))\n );\n }\n\n return `${url}?${search.join('&')}`;\n};\n\nexport const makeFetchOptions = (\n operation: Operation,\n body?: FetchBody\n): RequestInit => {\n const useGETMethod = shouldUseGet(operation);\n\n const extraOptions =\n typeof operation.context.fetchOptions === 'function'\n ? 
operation.context.fetchOptions()\n : operation.context.fetchOptions || {};\n\n return {\n ...extraOptions,\n body: !useGETMethod && body ? JSON.stringify(body) : undefined,\n method: useGETMethod ? 'GET' : 'POST',\n headers: useGETMethod\n ? extraOptions.headers\n : { 'content-type': 'application/json', ...extraOptions.headers },\n };\n};\n","import { Source, make } from 'wonka';\nimport { Operation, OperationResult } from '../types';\nimport { makeResult, makeErrorResult, mergeResultPatch } from '../utils';\n\nconst asyncIterator =\n typeof Symbol !== 'undefined' ? Symbol.asyncIterator : null;\nconst decoder = typeof TextDecoder !== 'undefined' ? new TextDecoder() : null;\nconst jsonHeaderRe = /content-type:[^\\r\\n]*application\\/json/i;\nconst boundaryHeaderRe = /boundary=\"?([^=\";]+)\"?/i;\n\ntype ChunkData = { done: false; value: Buffer | Uint8Array } | { done: true };\n\n// NOTE: We're avoiding referencing the `Buffer` global here to prevent\n// auto-polyfilling in Webpack\nconst toString = (input: Buffer | ArrayBuffer): string =>\n input.constructor.name === 'Buffer'\n ? (input as Buffer).toString()\n : decoder!.decode(input as ArrayBuffer);\n\n// DERIVATIVE: Copyright (c) 2021 Marais Rossouw <hi@marais.io>\n// See: https://github.com/maraisr/meros/blob/219fe95/src/browser.ts\nconst executeIncrementalFetch = (\n onResult: (result: OperationResult) => void,\n operation: Operation,\n response: Response\n): Promise<void> => {\n // NOTE: Guarding against fetch polyfills here\n const contentType =\n (response.headers && response.headers.get('Content-Type')) || '';\n if (!/multipart\\/mixed/i.test(contentType)) {\n return response.json().then(payload => {\n onResult(makeResult(operation, payload, response));\n });\n }\n\n let boundary = '---';\n const boundaryHeader = contentType.match(boundaryHeaderRe);\n if (boundaryHeader) boundary = '--' + boundaryHeader[1];\n\n let read: () => Promise<ChunkData>;\n let cancel = () => {\n /*noop*/\n };\n if (asyncIterator && response[asyncIterator]) {\n const iterator = response[asyncIterator]();\n read = iterator.next.bind(iterator);\n } else if ('body' in response && response.body) {\n const reader = response.body.getReader();\n cancel = reader.cancel.bind(reader);\n read = reader.read.bind(reader);\n } else {\n throw new TypeError('Streaming requests unsupported');\n }\n\n let buffer = '';\n let isPreamble = true;\n let nextResult: OperationResult | null = null;\n let prevResult: OperationResult | null = null;\n\n function next(data: ChunkData): Promise<void> | void {\n if (!data.done) {\n const chunk = toString(data.value);\n let boundaryIndex = chunk.indexOf(boundary);\n if (boundaryIndex > -1) {\n boundaryIndex += buffer.length;\n } else {\n boundaryIndex = buffer.indexOf(boundary);\n }\n\n buffer += chunk;\n while (boundaryIndex > -1) {\n const current = buffer.slice(0, boundaryIndex);\n const next = buffer.slice(boundaryIndex + boundary.length);\n\n if (isPreamble) {\n isPreamble = false;\n } else {\n const headersEnd = current.indexOf('\\r\\n\\r\\n') + 4;\n const headers = current.slice(0, headersEnd);\n const body = current.slice(headersEnd, current.lastIndexOf('\\r\\n'));\n\n let payload: any;\n if (jsonHeaderRe.test(headers)) {\n try {\n payload = JSON.parse(body);\n nextResult = prevResult = prevResult\n ? 
mergeResultPatch(prevResult, payload, response)\n : makeResult(operation, payload, response);\n } catch (_error) {}\n }\n\n if (next.slice(0, 2) === '--' || (payload && !payload.hasNext)) {\n if (!prevResult)\n return onResult(makeResult(operation, {}, response));\n break;\n }\n }\n\n buffer = next;\n boundaryIndex = buffer.indexOf(boundary);\n }\n }\n\n if (nextResult) {\n onResult(nextResult);\n nextResult = null;\n }\n\n if (!data.done && (!prevResult || prevResult.hasNext)) {\n return read().then(next);\n }\n }\n\n return read().then(next).finally(cancel);\n};\n\nexport const makeFetchSource = (\n operation: Operation,\n url: string,\n fetchOptions: RequestInit\n): Source<OperationResult> => {\n const maxStatus = fetchOptions.redirect === 'manual' ? 400 : 300;\n const fetcher = operation.context.fetch;\n\n return make<OperationResult>(({ next, complete }) => {\n const abortController =\n typeof AbortController !== 'undefined' ? new AbortController() : null;\n if (abortController) {\n fetchOptions.signal = abortController.signal;\n }\n\n let ended = false;\n let statusNotOk = false;\n let response: Response;\n\n Promise.resolve()\n .then(() => {\n if (ended) return;\n return (fetcher || fetch)(url, fetchOptions);\n })\n .then((_response: Response | void) => {\n if (!_response) return;\n response = _response;\n statusNotOk = response.status < 200 || response.status >= maxStatus;\n return executeIncrementalFetch(next, operation, response);\n })\n .then(complete)\n .catch((error: Error) => {\n if (error.name !== 'AbortError') {\n const result = makeErrorResult(\n operation,\n statusNotOk ? new Error(response.statusText) : error,\n response\n );\n\n next(result);\n complete();\n }\n });\n\n return () => {\n ended = true;\n if (abortController) {\n abortController.abort();\n }\n };\n });\n};\n"],"names":["generateErrorMessage","rehydrateGraphQlError","networkErr","message","super","response","h","const","hash","stringify","seen","x","let","i","Array","l$1","out","stringifyDocument","loc","key","noLocation","getOperationType","query","l","definitions","length","node","kind","Kind","OPERATION_DEFINITION","operation","result","extensions","Error","patch","path","CombinedError","prop","error","part","data","shouldUseGet","variables","makeFetchURL","request","body","push","useGETMethod","encodeURIComponent","name","test","complete","catch","boundary","cancel","prevResult","next","getReader","indexOf","buffer","_error","payload","fetcher"],"mappings":"oLAEMA,kEAqBH,SAAMC,mLAhBPC,MAmDQC,mCAENC,gDArDFF,oGAaID,uBA6CFE,QACKE,0HAIOF,sHChEZG,SAAKA,EAADA,+ECLRC,SAAWC,0BAIP,MAAO,sBAGR,SAAMC,kBACWC,MAACC,mEAGZC,GAAIC,gBACFJ,iBAASK,8TAsBVC,wBAAgBC,4EAWxBN,eCvBWO,2NAqBCC,yGAiBVC,IAAAA,MAAWF,IACyBG,kBAApCD,kPA0CSE,EAAoBC,OAC1BV,IAAIC,EAAI,EAAGU,EAAID,EAAME,YAAYC,OAAQZ,EAAIU,EAAGV,IAAK,KAClDa,EAAOJ,EAAME,YAAYX,MAC3Ba,EAAKC,OAASC,EAAKC,oCACdH,EAAKI,mYChGTC,gCASLC,UAAUC,sGAYOC,yDAGjBH,wDAED,6IAiBgBI,IAANC,iBACFL,iEACKM,kDAaOZ,QAGrBa,EAAOC,EADPC,4FC5DIC,eACGX,8DAOKzB,IAEZqC,4BAIWC,EACXb,uDAKA,WAAqBc,8DAEIA,2DAKrBC,wCAOKH,oBAOAI,gLAqBPD,EAAOE,kBAADC,8HCnEVzC,uLAQoB0C,uFAclB,0BAAyBC,gKAwGvBtC,6OAaQuC,UACLC,MAZH,eAaQd,yFA/GUe,mLAWlBC,mCASEC,GAAJ3C,qBAES4C,EAATA,iBACOhB,CAAAA,KAAI,mFACeK,KAAKY,4BACDC,kCAO1BC,oNAmBeC,6IAUPC,8IA/DZjD,aAuFMkD"}Выполнить команду
For local development only. Do not expose it to the internet!
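The file shown above is a Source Map v3 document: its sources and sourcesContent arrays embed the original TypeScript modules (error.ts, hash.ts, stringifyVariables.ts, request.ts, result.ts, fetchOptions.ts, fetchSource.ts) that were bundled into 16a45cc7.min.mjs. As a minimal sketch, assuming a Node.js environment and the absolute path listed in the directory view above, those embedded sources could be recovered roughly like this (the recovered/ output directory is an arbitrary name chosen for illustration):

    import { readFileSync, writeFileSync, mkdirSync } from 'node:fs';
    import { basename, join } from 'node:path';

    // Only the Source Map v3 fields used below are modelled here.
    interface SourceMapV3 {
      version: number;
      file?: string;
      sources: string[];
      sourcesContent?: (string | null)[];
      names: string[];
      mappings: string;
    }

    // Path taken from the directory and file name shown above.
    const mapPath =
      '/usr/lib/node_modules/bitgo/node_modules/@urql/core/dist/16a45cc7.min.mjs.map';
    const map: SourceMapV3 = JSON.parse(readFileSync(mapPath, 'utf8'));

    // Write each embedded original module (error.ts, hash.ts, ...) into ./recovered.
    mkdirSync('recovered', { recursive: true });
    map.sources.forEach((source, i) => {
      const content = map.sourcesContent?.[i];
      if (content != null) {
        writeFileSync(join('recovered', basename(source)), content);
      }
    });

Note that this sketch only extracts the embedded source text. The mappings field is Base64 VLQ data, and translating minified positions back to original positions would require a dedicated library such as the source-map package.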