PHP WebShell

Current directory: /usr/lib/node_modules/bitgo/node_modules/@urql/core/dist

Viewing file: urql-core.min.mjs.map

{"version":3,"file":"urql-core.min.mjs","sources":["../src/utils/typenames.ts","../src/utils/maskTypename.ts","../src/utils/streamUtils.ts","../src/utils/operation.ts","../src/gql.ts","../src/exchanges/cache.ts","../src/exchanges/ssr.ts","../src/exchanges/subscription.ts","../src/exchanges/dedup.ts","../src/exchanges/fetch.ts","../src/exchanges/fallback.ts","../src/exchanges/compose.ts","../src/exchanges/error.ts","../src/client.ts"],"sourcesContent":["import {\n  DocumentNode,\n  FieldNode,\n  InlineFragmentNode,\n  SelectionNode,\n  Kind,\n  visit,\n} from 'graphql';\n\nimport { KeyedDocumentNode, keyDocument } from './request';\n\ninterface EntityLike {\n  [key: string]: EntityLike | EntityLike[] | any;\n  __typename: string | null | void;\n}\n\nconst collectTypes = (\n  obj: EntityLike | EntityLike[],\n  types: { [typename: string]: unknown }\n) => {\n  if (Array.isArray(obj)) {\n    for (let i = 0; i < obj.length; i++) collectTypes(obj[i], types);\n  } else if (typeof obj === 'object' && obj !== null) {\n    for (const key in obj) {\n      if (key === '__typename' && typeof obj[key] === 'string') {\n        types[obj[key] as string] = 0;\n      } else {\n        collectTypes(obj[key], types);\n      }\n    }\n  }\n\n  return types;\n};\n\nexport const collectTypesFromResponse = (response: object) =>\n  Object.keys(collectTypes(response as EntityLike, {}));\n\nconst formatNode = (node: FieldNode | InlineFragmentNode) => {\n  if (\n    node.selectionSet &&\n    !node.selectionSet.selections.some(\n      node =>\n        node.kind === Kind.FIELD &&\n        node.name.value === '__typename' &&\n        !node.alias\n    )\n  ) {\n    return {\n      ...node,\n      selectionSet: {\n        ...node.selectionSet,\n        selections: [\n          ...(node.selectionSet.selections as SelectionNode[]),\n          {\n            kind: Kind.FIELD,\n            name: {\n              kind: Kind.NAME,\n              value: '__typename',\n            },\n          },\n        ],\n      },\n    };\n  }\n};\n\nconst formattedDocs = new Map<number, KeyedDocumentNode>();\n\nexport const formatDocument = <T extends DocumentNode>(node: T): T => {\n  const query = keyDocument(node);\n\n  let result = formattedDocs.get(query.__key);\n  if (!result) {\n    result = visit(query, {\n      Field: formatNode,\n      InlineFragment: formatNode,\n    }) as KeyedDocumentNode;\n\n    // Ensure that the hash of the resulting document won't suddenly change\n    // we are marking __key as non-enumerable so when external exchanges use visit\n    // to manipulate a document we won't restore the previous query due to the __key\n    // property.\n    Object.defineProperty(result, '__key', {\n      value: query.__key,\n      enumerable: false,\n    });\n\n    formattedDocs.set(query.__key, result);\n  }\n\n  return (result as unknown) as T;\n};\n","export const maskTypename = (data: any): any => {\n  if (!data || typeof data !== 'object') return data;\n\n  return Object.keys(data).reduce(\n    (acc, key: string) => {\n      const value = data[key];\n      if (key === '__typename') {\n        Object.defineProperty(acc, '__typename', {\n          enumerable: false,\n          value,\n        });\n      } else if (Array.isArray(value)) {\n        acc[key] = value.map(maskTypename);\n      } else if (value && typeof value === 'object' && '__typename' in value) {\n        acc[key] = maskTypename(value);\n      } else {\n        acc[key] = value;\n      }\n\n      return acc;\n    },\n    Array.isArray(data) ? 
[] : {}\n  );\n};\n","import { Source, pipe, toPromise, filter, take } from 'wonka';\nimport { OperationResult, PromisifiedSource } from '../types';\n\nexport function withPromise<T extends OperationResult>(\n  source$: Source<T>\n): PromisifiedSource<T> {\n  (source$ as PromisifiedSource<T>).toPromise = () => {\n    return pipe(\n      source$,\n      filter(result => !result.stale && !result.hasNext),\n      take(1),\n      toPromise\n    );\n  };\n\n  return source$ as PromisifiedSource<T>;\n}\n","import {\n  GraphQLRequest,\n  Operation,\n  OperationContext,\n  OperationType,\n} from '../types';\n\nfunction makeOperation<Data = any, Variables = object>(\n  kind: OperationType,\n  request: GraphQLRequest<Data, Variables>,\n  context: OperationContext\n): Operation<Data, Variables>;\n\nfunction makeOperation<Data = any, Variables = object>(\n  kind: OperationType,\n  request: Operation<Data, Variables>,\n  context?: OperationContext\n): Operation<Data, Variables>;\n\nfunction makeOperation(kind, request, context) {\n  if (!context) context = request.context;\n\n  return {\n    key: request.key,\n    query: request.query,\n    variables: request.variables,\n    kind,\n    context,\n  };\n}\n\nexport { makeOperation };\n\n/** Spreads the provided metadata to the source operation's meta property in context.  */\nexport const addMetadata = (\n  operation: Operation,\n  meta: OperationContext['meta']\n) => {\n  return makeOperation(operation.kind, operation, {\n    ...operation.context,\n    meta: {\n      ...operation.context.meta,\n      ...meta,\n    },\n  });\n};\n","/* eslint-disable prefer-rest-params */\nimport { TypedDocumentNode } from '@graphql-typed-document-node/core';\n\nimport {\n  DocumentNode,\n  DefinitionNode,\n  FragmentDefinitionNode,\n  Kind,\n} from 'graphql';\n\nimport { keyDocument, stringifyDocument } from './utils';\n\nconst applyDefinitions = (\n  fragmentNames: Map<string, string>,\n  target: DefinitionNode[],\n  source: Array<DefinitionNode> | ReadonlyArray<DefinitionNode>\n) => {\n  for (let i = 0; i < source.length; i++) {\n    if (source[i].kind === Kind.FRAGMENT_DEFINITION) {\n      const name = (source[i] as FragmentDefinitionNode).name.value;\n      const value = stringifyDocument(source[i]);\n      // Fragments will be deduplicated according to this Map\n      if (!fragmentNames.has(name)) {\n        fragmentNames.set(name, value);\n        target.push(source[i]);\n      } else if (\n        process.env.NODE_ENV !== 'production' &&\n        fragmentNames.get(name) !== value\n      ) {\n        // Fragments with the same names is expected to have the same contents\n        console.warn(\n          '[WARNING: Duplicate Fragment] A fragment with name `' +\n            name +\n            '` already exists in this document.\\n' +\n            'While fragment names may not be unique across your source, each name must be unique per document.'\n        );\n      }\n    } else {\n      target.push(source[i]);\n    }\n  }\n};\n\nfunction gql<Data = any, Variables = object>(\n  strings: TemplateStringsArray,\n  ...interpolations: Array<TypedDocumentNode | DocumentNode | string>\n): TypedDocumentNode<Data, Variables>;\n\nfunction gql<Data = any, Variables = object>(\n  string: string\n): TypedDocumentNode<Data, Variables>;\n\nfunction gql(/* arguments */) {\n  const fragmentNames = new Map<string, string>();\n  const definitions: DefinitionNode[] = [];\n  const interpolations: DefinitionNode[] = [];\n\n  // Apply the entire tagged template body's definitions\n  let 
body: string = Array.isArray(arguments[0])\n    ? arguments[0][0]\n    : arguments[0] || '';\n  for (let i = 1; i < arguments.length; i++) {\n    const value = arguments[i];\n    if (value && value.definitions) {\n      interpolations.push(...value.definitions);\n    } else {\n      body += value;\n    }\n\n    body += arguments[0][i];\n  }\n\n  // Apply the tag's body definitions\n  applyDefinitions(fragmentNames, definitions, keyDocument(body).definitions);\n  // Copy over each interpolated document's definitions\n  applyDefinitions(fragmentNames, definitions, interpolations);\n\n  return keyDocument({\n    kind: Kind.DOCUMENT,\n    definitions,\n  });\n}\n\nexport { gql };\n","/* eslint-disable @typescript-eslint/no-use-before-define */\nimport { filter, map, merge, pipe, share, tap } from 'wonka';\n\nimport { Client } from '../client';\nimport { Exchange, Operation, OperationResult } from '../types';\n\nimport {\n  makeOperation,\n  addMetadata,\n  collectTypesFromResponse,\n  formatDocument,\n} from '../utils';\n\ntype ResultCache = Map<number, OperationResult>;\n\ninterface OperationCache {\n  [key: string]: Set<number>;\n}\n\nconst shouldSkip = ({ kind }: Operation) =>\n  kind !== 'mutation' && kind !== 'query';\n\nexport const cacheExchange: Exchange = ({ forward, client, dispatchDebug }) => {\n  const resultCache = new Map() as ResultCache;\n  const operationCache = Object.create(null) as OperationCache;\n\n  // Adds unique typenames to query (for invalidating cache entries)\n  const mapTypeNames = (operation: Operation): Operation => {\n    const formattedOperation = makeOperation(operation.kind, operation);\n    formattedOperation.query = formatDocument(operation.query);\n    return formattedOperation;\n  };\n\n  const isOperationCached = (operation: Operation) => {\n    const {\n      key,\n      kind,\n      context: { requestPolicy },\n    } = operation;\n    return (\n      kind === 'query' &&\n      requestPolicy !== 'network-only' &&\n      (requestPolicy === 'cache-only' || resultCache.has(key))\n    );\n  };\n\n  return ops$ => {\n    const sharedOps$ = share(ops$);\n\n    const cachedOps$ = pipe(\n      sharedOps$,\n      filter(op => !shouldSkip(op) && isOperationCached(op)),\n      map(operation => {\n        const cachedResult = resultCache.get(operation.key);\n\n        dispatchDebug({\n          operation,\n          ...(cachedResult\n            ? {\n                type: 'cacheHit',\n                message: 'The result was successfully retried from the cache',\n              }\n            : {\n                type: 'cacheMiss',\n                message: 'The result could not be retrieved from the cache',\n              }),\n        });\n\n        const result: OperationResult = {\n          ...cachedResult,\n          operation: addMetadata(operation, {\n            cacheOutcome: cachedResult ? 
'hit' : 'miss',\n          }),\n        };\n\n        if (operation.context.requestPolicy === 'cache-and-network') {\n          result.stale = true;\n          reexecuteOperation(client, operation);\n        }\n\n        return result;\n      })\n    );\n\n    const forwardedOps$ = pipe(\n      merge([\n        pipe(\n          sharedOps$,\n          filter(op => !shouldSkip(op) && !isOperationCached(op)),\n          map(mapTypeNames)\n        ),\n        pipe(\n          sharedOps$,\n          filter(op => shouldSkip(op))\n        ),\n      ]),\n      map(op => addMetadata(op, { cacheOutcome: 'miss' })),\n      filter(\n        op => op.kind !== 'query' || op.context.requestPolicy !== 'cache-only'\n      ),\n      forward,\n      tap(response => {\n        let { operation } = response;\n        if (!operation) return;\n\n        const typenames = collectTypesFromResponse(response.data).concat(\n          operation.context.additionalTypenames || []\n        );\n\n        // Invalidates the cache given a mutation's response\n        if (response.operation.kind === 'mutation') {\n          const pendingOperations = new Set<number>();\n\n          dispatchDebug({\n            type: 'cacheInvalidation',\n            message: `The following typenames have been invalidated: ${typenames}`,\n            operation,\n            data: { typenames, response },\n          });\n\n          for (let i = 0; i < typenames.length; i++) {\n            const typeName = typenames[i];\n            const operations =\n              operationCache[typeName] ||\n              (operationCache[typeName] = new Set());\n            operations.forEach(key => {\n              pendingOperations.add(key);\n            });\n            operations.clear();\n          }\n\n          pendingOperations.forEach(key => {\n            if (resultCache.has(key)) {\n              operation = (resultCache.get(key) as OperationResult).operation;\n              resultCache.delete(key);\n              reexecuteOperation(client, operation);\n            }\n          });\n          // Mark typenames on typenameInvalidate for early invalidation\n        } else if (operation.kind === 'query' && response.data) {\n          resultCache.set(operation.key, response);\n          for (let i = 0; i < typenames.length; i++) {\n            const typeName = typenames[i];\n            const operations =\n              operationCache[typeName] ||\n              (operationCache[typeName] = new Set());\n            operations.add(operation.key);\n          }\n        }\n      })\n    );\n\n    return merge([cachedOps$, forwardedOps$]);\n  };\n};\n\n// Reexecutes a given operation with the default requestPolicy\nexport const reexecuteOperation = (client: Client, operation: Operation) => {\n  return client.reexecuteOperation(\n    makeOperation(operation.kind, operation, {\n      ...operation.context,\n      requestPolicy: 'network-only',\n    })\n  );\n};\n","import { GraphQLError } from 'graphql';\nimport { pipe, share, filter, merge, map, tap } from 'wonka';\nimport { Exchange, OperationResult, Operation } from '../types';\nimport { CombinedError } from '../utils';\nimport { reexecuteOperation } from './cache';\n\nexport interface SerializedResult {\n  hasNext?: boolean;\n  data?: string | undefined; // JSON string of data\n  extensions?: string | undefined; // JSON string of data\n  error?: {\n    graphQLErrors: Array<Partial<GraphQLError> | string>;\n    networkError?: string;\n  };\n}\n\nexport interface SSRData {\n  [key: string]: 
SerializedResult;\n}\n\nexport interface SSRExchangeParams {\n  isClient?: boolean;\n  initialState?: SSRData;\n  staleWhileRevalidate?: boolean;\n  includeExtensions?: boolean;\n}\n\nexport interface SSRExchange extends Exchange {\n  /** Rehydrates cached data */\n  restoreData(data: SSRData): void;\n  /** Extracts cached data */\n  extractData(): SSRData;\n}\n\n/** Serialize an OperationResult to plain JSON */\nconst serializeResult = (\n  { hasNext, data, extensions, error }: OperationResult,\n  includeExtensions: boolean\n): SerializedResult => {\n  const result: SerializedResult = {};\n  if (data !== undefined) result.data = JSON.stringify(data);\n  if (includeExtensions && extensions !== undefined) {\n    result.extensions = JSON.stringify(extensions);\n  }\n  if (hasNext) result.hasNext = true;\n\n  if (error) {\n    result.error = {\n      graphQLErrors: error.graphQLErrors.map(error => {\n        if (!error.path && !error.extensions) return error.message;\n\n        return {\n          message: error.message,\n          path: error.path,\n          extensions: error.extensions,\n        };\n      }),\n    };\n\n    if (error.networkError) {\n      result.error.networkError = '' + error.networkError;\n    }\n  }\n\n  return result;\n};\n\n/** Deserialize plain JSON to an OperationResult */\nconst deserializeResult = (\n  operation: Operation,\n  result: SerializedResult,\n  includeExtensions: boolean\n): OperationResult => ({\n  operation,\n  data: result.data ? JSON.parse(result.data) : undefined,\n  extensions:\n    includeExtensions && result.extensions\n      ? JSON.parse(result.extensions)\n      : undefined,\n  error: result.error\n    ? new CombinedError({\n        networkError: result.error.networkError\n          ? new Error(result.error.networkError)\n          : undefined,\n        graphQLErrors: result.error.graphQLErrors,\n      })\n    : undefined,\n  hasNext: result.hasNext,\n});\n\nconst revalidated = new Set<number>();\n\n/** The ssrExchange can be created to capture data during SSR and also to rehydrate it on the client */\nexport const ssrExchange = (params?: SSRExchangeParams): SSRExchange => {\n  const staleWhileRevalidate = !!(params && params.staleWhileRevalidate);\n  const includeExtensions = !!(params && params.includeExtensions);\n  const data: Record<string, SerializedResult | null> = {};\n\n  // On the client-side, we delete results from the cache as they're resolved\n  // this is delayed so that concurrent queries don't delete each other's data\n  const invalidateQueue: number[] = [];\n  const invalidate = (result: OperationResult) => {\n    invalidateQueue.push(result.operation.key);\n    if (invalidateQueue.length === 1) {\n      Promise.resolve().then(() => {\n        let key: number | void;\n        while ((key = invalidateQueue.shift())) {\n          data[key] = null;\n        }\n      });\n    }\n  };\n\n  // The SSR Exchange is a temporary cache that can populate results into data for suspense\n  // On the client it can be used to retrieve these temporary results from a rehydrated cache\n  const ssr: SSRExchange = ({ client, forward }) => ops$ => {\n    // params.isClient tells us whether we're on the client-side\n    // By default we assume that we're on the client if suspense-mode is disabled\n    const isClient =\n      params && typeof params.isClient === 'boolean'\n        ? 
!!params.isClient\n        : !client.suspense;\n\n    const sharedOps$ = share(ops$);\n\n    let forwardedOps$ = pipe(\n      sharedOps$,\n      filter(\n        operation => !data[operation.key] || !!data[operation.key]!.hasNext\n      ),\n      forward\n    );\n\n    // NOTE: Since below we might delete the cached entry after accessing\n    // it once, cachedOps$ needs to be merged after forwardedOps$\n    let cachedOps$ = pipe(\n      sharedOps$,\n      filter(operation => !!data[operation.key]),\n      map(op => {\n        const serialized = data[op.key]!;\n        const result = deserializeResult(op, serialized, includeExtensions);\n        if (staleWhileRevalidate && !revalidated.has(op.key)) {\n          result.stale = true;\n          revalidated.add(op.key);\n          reexecuteOperation(client, op);\n        }\n\n        return result;\n      })\n    );\n\n    if (!isClient) {\n      // On the server we cache results in the cache as they're resolved\n      forwardedOps$ = pipe(\n        forwardedOps$,\n        tap((result: OperationResult) => {\n          const { operation } = result;\n          if (operation.kind !== 'mutation') {\n            const serialized = serializeResult(result, includeExtensions);\n            data[operation.key] = serialized;\n          }\n        })\n      );\n    } else {\n      // On the client we delete results from the cache as they're resolved\n      cachedOps$ = pipe(cachedOps$, tap(invalidate));\n    }\n\n    return merge([forwardedOps$, cachedOps$]);\n  };\n\n  ssr.restoreData = (restore: SSRData) => {\n    for (const key in restore) {\n      // We only restore data that hasn't been previously invalidated\n      if (data[key] !== null) {\n        data[key] = restore[key];\n      }\n    }\n  };\n\n  ssr.extractData = () => {\n    const result: SSRData = {};\n    for (const key in data) if (data[key] != null) result[key] = data[key]!;\n    return result;\n  };\n\n  if (params && params.initialState) {\n    ssr.restoreData(params.initialState);\n  }\n\n  return ssr;\n};\n","import { print } from 'graphql';\n\nimport {\n  filter,\n  make,\n  merge,\n  mergeMap,\n  pipe,\n  share,\n  Source,\n  takeUntil,\n} from 'wonka';\n\nimport { makeResult, makeErrorResult, makeOperation } from '../utils';\n\nimport {\n  Exchange,\n  ExecutionResult,\n  Operation,\n  OperationContext,\n  OperationResult,\n} from '../types';\n\nexport interface ObserverLike<T> {\n  next: (value: T) => void;\n  error: (err: any) => void;\n  complete: () => void;\n}\n\n/** An abstract observable interface conforming to: https://github.com/tc39/proposal-observable */\nexport interface ObservableLike<T> {\n  subscribe(\n    observer: ObserverLike<T>\n  ): {\n    unsubscribe: () => void;\n  };\n}\n\nexport interface SubscriptionOperation {\n  query: string;\n  variables?: Record<string, unknown>;\n  key: string;\n  context: OperationContext;\n}\n\nexport type SubscriptionForwarder = (\n  operation: SubscriptionOperation\n) => ObservableLike<ExecutionResult>;\n\n/** This is called to create a subscription and needs to be hooked up to a transport client. 
*/\nexport interface SubscriptionExchangeOpts {\n  // This has been modelled to work with subscription-transport-ws\n  // See: https://github.com/apollographql/subscriptions-transport-ws#requestoptions--observableexecutionresult-returns-observable-to-execute-the-operation\n  forwardSubscription: SubscriptionForwarder;\n\n  /** This flag may be turned on to allow your subscriptions-transport to handle all operation types */\n  enableAllOperations?: boolean;\n}\n\nexport const subscriptionExchange = ({\n  forwardSubscription,\n  enableAllOperations,\n}: SubscriptionExchangeOpts): Exchange => ({ client, forward }) => {\n  const createSubscriptionSource = (\n    operation: Operation\n  ): Source<OperationResult> => {\n    // This excludes the query's name as a field although subscription-transport-ws does accept it since it's optional\n    const observableish = forwardSubscription({\n      key: operation.key.toString(36),\n      query: print(operation.query),\n      variables: operation.variables,\n      context: { ...operation.context },\n    });\n\n    return make<OperationResult>(({ next, complete }) => {\n      let isComplete = false;\n      let sub;\n\n      Promise.resolve().then(() => {\n        if (isComplete) return;\n\n        sub = observableish.subscribe({\n          next: result => next(makeResult(operation, result)),\n          error: err => next(makeErrorResult(operation, err)),\n          complete: () => {\n            if (!isComplete) {\n              isComplete = true;\n              if (operation.kind === 'subscription') {\n                client.reexecuteOperation(\n                  makeOperation('teardown', operation, operation.context)\n                );\n              }\n\n              complete();\n            }\n          },\n        });\n      });\n\n      return () => {\n        isComplete = true;\n        if (sub) sub.unsubscribe();\n      };\n    });\n  };\n\n  const isSubscriptionOperation = (operation: Operation): boolean => {\n    const { kind } = operation;\n    return (\n      kind === 'subscription' ||\n      (!!enableAllOperations && (kind === 'query' || kind === 'mutation'))\n    );\n  };\n\n  return ops$ => {\n    const sharedOps$ = share(ops$);\n    const subscriptionResults$ = pipe(\n      sharedOps$,\n      filter(isSubscriptionOperation),\n      mergeMap(operation => {\n        const { key } = operation;\n        const teardown$ = pipe(\n          sharedOps$,\n          filter(op => op.kind === 'teardown' && op.key === key)\n        );\n\n        return pipe(createSubscriptionSource(operation), takeUntil(teardown$));\n      })\n    );\n\n    const forward$ = pipe(\n      sharedOps$,\n      filter(op => !isSubscriptionOperation(op)),\n      forward\n    );\n\n    return merge([subscriptionResults$, forward$]);\n  };\n};\n","import { filter, pipe, tap } from 'wonka';\nimport { Exchange, Operation, OperationResult } from '../types';\n\n/** A default exchange for debouncing GraphQL requests. 
*/\nexport const dedupExchange: Exchange = ({ forward, dispatchDebug }) => {\n  const inFlightKeys = new Set<number>();\n\n  const filterIncomingOperation = (operation: Operation) => {\n    const { key, kind } = operation;\n    if (kind === 'teardown') {\n      inFlightKeys.delete(key);\n      return true;\n    }\n\n    if (kind !== 'query' && kind !== 'subscription') {\n      return true;\n    }\n\n    const isInFlight = inFlightKeys.has(key);\n    inFlightKeys.add(key);\n\n    if (isInFlight) {\n      dispatchDebug({\n        type: 'dedup',\n        message: 'An operation has been deduped.',\n        operation,\n      });\n    }\n\n    return !isInFlight;\n  };\n\n  const afterOperationResult = ({ operation, hasNext }: OperationResult) => {\n    if (!hasNext) {\n      inFlightKeys.delete(operation.key);\n    }\n  };\n\n  return ops$ => {\n    const forward$ = pipe(ops$, filter(filterIncomingOperation));\n    return pipe(forward(forward$), tap(afterOperationResult));\n  };\n};\n","/* eslint-disable @typescript-eslint/no-use-before-define */\nimport { filter, merge, mergeMap, pipe, share, takeUntil, onPush } from 'wonka';\n\nimport { Exchange } from '../types';\nimport {\n  makeFetchBody,\n  makeFetchURL,\n  makeFetchOptions,\n  makeFetchSource,\n} from '../internal';\n\n/** A default exchange for fetching GraphQL requests. */\nexport const fetchExchange: Exchange = ({ forward, dispatchDebug }) => {\n  return ops$ => {\n    const sharedOps$ = share(ops$);\n    const fetchResults$ = pipe(\n      sharedOps$,\n      filter(operation => {\n        return operation.kind === 'query' || operation.kind === 'mutation';\n      }),\n      mergeMap(operation => {\n        const { key } = operation;\n        const teardown$ = pipe(\n          sharedOps$,\n          filter(op => op.kind === 'teardown' && op.key === key)\n        );\n\n        const body = makeFetchBody(operation);\n        const url = makeFetchURL(operation, body);\n        const fetchOptions = makeFetchOptions(operation, body);\n\n        dispatchDebug({\n          type: 'fetchRequest',\n          message: 'A fetch request is being executed.',\n          operation,\n          data: {\n            url,\n            fetchOptions,\n          },\n        });\n\n        return pipe(\n          makeFetchSource(operation, url, fetchOptions),\n          takeUntil(teardown$),\n          onPush(result => {\n            const error = !result.data ? result.error : undefined;\n\n            dispatchDebug({\n              type: error ? 'fetchError' : 'fetchSuccess',\n              message: `A ${\n                error ? 
'failed' : 'successful'\n              } fetch response has been returned.`,\n              operation,\n              data: {\n                url,\n                fetchOptions,\n                value: error || result,\n              },\n            });\n          })\n        );\n      })\n    );\n\n    const forward$ = pipe(\n      sharedOps$,\n      filter(operation => {\n        return operation.kind !== 'query' && operation.kind !== 'mutation';\n      }),\n      forward\n    );\n\n    return merge([fetchResults$, forward$]);\n  };\n};\n","import { filter, pipe, tap } from 'wonka';\nimport { Operation, ExchangeIO, ExchangeInput } from '../types';\nimport { noop } from '../utils';\n\n/** This is always the last exchange in the chain; No operation should ever reach it */\nexport const fallbackExchange: ({\n  dispatchDebug,\n}: Pick<ExchangeInput, 'dispatchDebug'>) => ExchangeIO = ({\n  dispatchDebug,\n}) => ops$ =>\n  pipe(\n    ops$,\n    tap<Operation>(operation => {\n      if (\n        operation.kind !== 'teardown' &&\n        process.env.NODE_ENV !== 'production'\n      ) {\n        const message = `No exchange has handled operations of kind \"${operation.kind}\". Check whether you've added an exchange responsible for these operations.`;\n\n        dispatchDebug({\n          type: 'fallbackCatch',\n          message,\n          operation,\n        });\n        console.warn(message);\n      }\n    }),\n    /* All operations that skipped through the entire exchange chain should be filtered from the output */\n    filter<any>(() => false)\n  );\n\nexport const fallbackExchangeIO: ExchangeIO = fallbackExchange({\n  dispatchDebug: noop,\n});\n","import { Exchange, ExchangeInput } from '../types';\n\n/** This composes an array of Exchanges into a single ExchangeIO function */\nexport const composeExchanges = (exchanges: Exchange[]) => ({\n  client,\n  forward,\n  dispatchDebug,\n}: ExchangeInput) =>\n  exchanges.reduceRight(\n    (forward, exchange) =>\n      exchange({\n        client,\n        forward,\n        dispatchDebug(event) {\n          dispatchDebug({\n            timestamp: Date.now(),\n            source: exchange.name,\n            ...event,\n          });\n        },\n      }),\n    forward\n  );\n","import { pipe, tap } from 'wonka';\nimport { Exchange, Operation } from '../types';\nimport { CombinedError } from '../utils';\n\nexport const errorExchange = ({\n  onError,\n}: {\n  onError: (error: CombinedError, operation: Operation) => void;\n}): Exchange => ({ forward }) => ops$ => {\n  return pipe(\n    forward(ops$),\n    tap(({ error, operation }) => {\n      if (error) {\n        onError(error, operation);\n      }\n    })\n  );\n};\n","/* eslint-disable @typescript-eslint/no-use-before-define */\n\nimport {\n  filter,\n  make,\n  makeSubject,\n  onEnd,\n  onPush,\n  onStart,\n  pipe,\n  share,\n  Source,\n  take,\n  takeUntil,\n  publish,\n  subscribe,\n  switchMap,\n  fromValue,\n  merge,\n  map,\n  Subscription,\n} from 'wonka';\n\nimport { TypedDocumentNode } from '@graphql-typed-document-node/core';\nimport { DocumentNode } from 'graphql';\n\nimport { composeExchanges, defaultExchanges } from './exchanges';\nimport { fallbackExchange } from './exchanges/fallback';\n\nimport {\n  Exchange,\n  ExchangeInput,\n  GraphQLRequest,\n  Operation,\n  OperationContext,\n  OperationResult,\n  OperationType,\n  RequestPolicy,\n  PromisifiedSource,\n  DebugEvent,\n} from './types';\n\nimport {\n  createRequest,\n  withPromise,\n  maskTypename,\n  noop,\n  makeOperation,\n  
getOperationType,\n} from './utils';\n\n/** Options for configuring the URQL [client]{@link Client}. */\nexport interface ClientOptions {\n  /** Target endpoint URL such as `https://my-target:8080/graphql`. */\n  url: string;\n  /** Any additional options to pass to fetch. */\n  fetchOptions?: RequestInit | (() => RequestInit);\n  /** An alternative fetch implementation. */\n  fetch?: typeof fetch;\n  /** An ordered array of Exchanges. */\n  exchanges?: Exchange[];\n  /** Activates support for Suspense. */\n  suspense?: boolean;\n  /** The default request policy for requests. */\n  requestPolicy?: RequestPolicy;\n  /** Use HTTP GET for queries. */\n  preferGetMethod?: boolean;\n  /** Mask __typename from results. */\n  maskTypename?: boolean;\n}\n\nexport interface Client {\n  new (options: ClientOptions): Client;\n\n  operations$: Source<Operation>;\n\n  /** Start an operation from an exchange */\n  reexecuteOperation: (operation: Operation) => void;\n  /** Event target for monitoring, e.g. for @urql/devtools */\n  subscribeToDebugTarget?: (onEvent: (e: DebugEvent) => void) => Subscription;\n\n  // These are variables derived from ClientOptions\n  url: string;\n  fetch?: typeof fetch;\n  fetchOptions?: RequestInit | (() => RequestInit);\n  suspense: boolean;\n  requestPolicy: RequestPolicy;\n  preferGetMethod: boolean;\n  maskTypename: boolean;\n\n  createOperationContext(\n    opts?: Partial<OperationContext> | undefined\n  ): OperationContext;\n\n  createRequestOperation<Data = any, Variables = object>(\n    kind: OperationType,\n    request: GraphQLRequest<Data, Variables>,\n    opts?: Partial<OperationContext> | undefined\n  ): Operation<Data, Variables>;\n\n  /** Executes an Operation by sending it through the exchange pipeline It returns an observable that emits all related exchange results and keeps track of this observable's subscribers. A teardown signal will be emitted when no subscribers are listening anymore. 
*/\n  executeRequestOperation<Data = any, Variables = object>(\n    operation: Operation<Data, Variables>\n  ): Source<OperationResult<Data, Variables>>;\n\n  query<Data = any, Variables extends object = {}>(\n    query: DocumentNode | TypedDocumentNode<Data, Variables> | string,\n    variables?: Variables,\n    context?: Partial<OperationContext>\n  ): PromisifiedSource<OperationResult<Data, Variables>>;\n\n  readQuery<Data = any, Variables extends object = {}>(\n    query: DocumentNode | TypedDocumentNode<Data, Variables> | string,\n    variables?: Variables,\n    context?: Partial<OperationContext>\n  ): OperationResult<Data, Variables> | null;\n\n  executeQuery<Data = any, Variables = object>(\n    query: GraphQLRequest<Data, Variables>,\n    opts?: Partial<OperationContext> | undefined\n  ): Source<OperationResult<Data, Variables>>;\n\n  subscription<Data = any, Variables extends object = {}>(\n    query: DocumentNode | TypedDocumentNode<Data, Variables> | string,\n    variables?: Variables,\n    context?: Partial<OperationContext>\n  ): Source<OperationResult<Data, Variables>>;\n\n  executeSubscription<Data = any, Variables = object>(\n    query: GraphQLRequest<Data, Variables>,\n    opts?: Partial<OperationContext> | undefined\n  ): Source<OperationResult<Data, Variables>>;\n\n  mutation<Data = any, Variables extends object = {}>(\n    query: DocumentNode | TypedDocumentNode<Data, Variables> | string,\n    variables?: Variables,\n    context?: Partial<OperationContext>\n  ): PromisifiedSource<OperationResult<Data, Variables>>;\n\n  executeMutation<Data = any, Variables = object>(\n    query: GraphQLRequest<Data, Variables>,\n    opts?: Partial<OperationContext> | undefined\n  ): Source<OperationResult<Data, Variables>>;\n}\n\nexport const Client: new (opts: ClientOptions) => Client = function Client(\n  this: Client | {},\n  opts: ClientOptions\n) {\n  if (process.env.NODE_ENV !== 'production' && !opts.url) {\n    throw new Error('You are creating an urql-client without a url.');\n  }\n\n  const replays = new Map<number, OperationResult>();\n  const active: Map<number, Source<OperationResult>> = new Map();\n  const queue: Operation[] = [];\n\n  // This subject forms the input of operations; executeOperation may be\n  // called to dispatch a new operation on the subject\n  const { source: operations$, next: nextOperation } = makeSubject<Operation>();\n\n  // We define a queued dispatcher on the subject, which empties the queue when it's\n  // activated to allow `reexecuteOperation` to be trampoline-scheduled\n  let isOperationBatchActive = false;\n  function dispatchOperation(operation?: Operation | void) {\n    isOperationBatchActive = true;\n    if (operation) nextOperation(operation);\n    while ((operation = queue.shift())) nextOperation(operation);\n    isOperationBatchActive = false;\n  }\n\n  /** Defines how result streams are created */\n  const makeResultSource = (operation: Operation) => {\n    let result$ = pipe(\n      results$,\n      filter(\n        (res: OperationResult) =>\n          res.operation.kind === operation.kind &&\n          res.operation.key === operation.key\n      )\n    );\n\n    // Mask typename properties if the option for it is turned on\n    if (client.maskTypename) {\n      result$ = pipe(\n        result$,\n        map(res => ({ ...res, data: maskTypename(res.data) }))\n      );\n    }\n\n    // A mutation is always limited to just a single result and is never shared\n    if (operation.kind === 'mutation') {\n      return pipe(\n        result$,\n 
       onStart(() => dispatchOperation(operation)),\n        take(1)\n      );\n    }\n\n    const source = pipe(\n      result$,\n      // End the results stream when an active teardown event is sent\n      takeUntil(\n        pipe(\n          operations$,\n          filter(op => op.kind === 'teardown' && op.key === operation.key)\n        )\n      ),\n      switchMap(result => {\n        if (operation.kind !== 'query' || result.stale) {\n          return fromValue(result);\n        }\n\n        return merge([\n          fromValue(result),\n          // Mark a result as stale when a new operation is sent for it\n          pipe(\n            operations$,\n            filter(\n              op =>\n                op.kind === 'query' &&\n                op.key === operation.key &&\n                op.context.requestPolicy !== 'cache-only'\n            ),\n            take(1),\n            map(() => ({ ...result, stale: true }))\n          ),\n        ]);\n      }),\n      onPush(result => {\n        replays.set(operation.key, result);\n      }),\n      onEnd(() => {\n        // Delete the active operation handle\n        replays.delete(operation.key);\n        active.delete(operation.key);\n        // Delete all queued up operations of the same key on end\n        for (let i = queue.length - 1; i >= 0; i--)\n          if (queue[i].key === operation.key) queue.splice(i, 1);\n        // Dispatch a teardown signal for the stopped operation\n        dispatchOperation(\n          makeOperation('teardown', operation, operation.context)\n        );\n      }),\n      share\n    );\n\n    return source;\n  };\n\n  const instance: Client =\n    this instanceof Client ? this : Object.create(Client.prototype);\n  const client: Client = Object.assign(instance, {\n    url: opts.url,\n    fetchOptions: opts.fetchOptions,\n    fetch: opts.fetch,\n    suspense: !!opts.suspense,\n    requestPolicy: opts.requestPolicy || 'cache-first',\n    preferGetMethod: !!opts.preferGetMethod,\n    maskTypename: !!opts.maskTypename,\n\n    operations$,\n\n    reexecuteOperation(operation: Operation) {\n      // Reexecute operation only if any subscribers are still subscribed to the\n      // operation's exchange results\n      if (operation.kind === 'mutation' || active.has(operation.key)) {\n        queue.push(operation);\n        if (!isOperationBatchActive) {\n          Promise.resolve().then(dispatchOperation);\n        }\n      }\n    },\n\n    createOperationContext(opts) {\n      if (!opts) opts = {};\n\n      return {\n        url: client.url,\n        fetchOptions: client.fetchOptions,\n        fetch: client.fetch,\n        preferGetMethod: client.preferGetMethod,\n        ...opts,\n        suspense: opts.suspense || (opts.suspense !== false && client.suspense),\n        requestPolicy: opts.requestPolicy || client.requestPolicy,\n      };\n    },\n\n    createRequestOperation(kind, request, opts) {\n      const requestOperationType = getOperationType(request.query);\n      if (\n        process.env.NODE_ENV !== 'production' &&\n        kind !== 'teardown' &&\n        requestOperationType !== kind\n      ) {\n        throw new Error(\n          `Expected operation of type \"${kind}\" but found \"${requestOperationType}\"`\n        );\n      }\n      return makeOperation(kind, request, client.createOperationContext(opts));\n    },\n\n    executeRequestOperation(operation) {\n      if (operation.kind === 'mutation') {\n        return makeResultSource(operation);\n      }\n\n      return make(observer => {\n        
let source = active.get(operation.key);\n\n        if (!source) {\n          active.set(operation.key, (source = makeResultSource(operation)));\n        }\n\n        const isNetworkOperation =\n          operation.context.requestPolicy === 'cache-and-network' ||\n          operation.context.requestPolicy === 'network-only';\n\n        return pipe(\n          source,\n          onStart(() => {\n            const prevReplay = replays.get(operation.key);\n\n            if (operation.kind === 'subscription') {\n              return dispatchOperation(operation);\n            } else if (isNetworkOperation) {\n              dispatchOperation(operation);\n            }\n\n            if (\n              prevReplay != null &&\n              prevReplay === replays.get(operation.key)\n            ) {\n              observer.next(\n                isNetworkOperation ? { ...prevReplay, stale: true } : prevReplay\n              );\n            } else if (!isNetworkOperation) {\n              dispatchOperation(operation);\n            }\n          }),\n          onEnd(observer.complete),\n          subscribe(observer.next)\n        ).unsubscribe;\n      });\n    },\n\n    executeQuery(query, opts) {\n      const operation = client.createRequestOperation('query', query, opts);\n      return client.executeRequestOperation(operation);\n    },\n\n    executeSubscription(query, opts) {\n      const operation = client.createRequestOperation(\n        'subscription',\n        query,\n        opts\n      );\n      return client.executeRequestOperation(operation);\n    },\n\n    executeMutation(query, opts) {\n      const operation = client.createRequestOperation('mutation', query, opts);\n      return client.executeRequestOperation(operation);\n    },\n\n    query(query, variables, context) {\n      if (!context || typeof context.suspense !== 'boolean') {\n        context = { ...context, suspense: false };\n      }\n\n      return withPromise(\n        client.executeQuery(createRequest(query, variables), context)\n      );\n    },\n\n    readQuery(query, variables, context) {\n      let result: OperationResult | null = null;\n\n      pipe(\n        client.query(query, variables, context),\n        subscribe(res => {\n          result = res;\n        })\n      ).unsubscribe();\n\n      return result;\n    },\n\n    subscription(query, variables, context) {\n      return client.executeSubscription(\n        createRequest(query, variables),\n        context\n      );\n    },\n\n    mutation(query, variables, context) {\n      return withPromise(\n        client.executeMutation(createRequest(query, variables), context)\n      );\n    },\n  } as Client);\n\n  let dispatchDebug: ExchangeInput['dispatchDebug'] = noop;\n  if (process.env.NODE_ENV !== 'production') {\n    const { next, source } = makeSubject<DebugEvent>();\n    client.subscribeToDebugTarget = (onEvent: (e: DebugEvent) => void) =>\n      pipe(source, subscribe(onEvent));\n    dispatchDebug = next as ExchangeInput['dispatchDebug'];\n  }\n\n  const exchanges =\n    opts.exchanges !== undefined ? 
opts.exchanges : defaultExchanges;\n\n  // All exchange are composed into a single one and are called using the constructed client\n  // and the fallback exchange stream\n  const composedExchange = composeExchanges(exchanges);\n\n  // All exchanges receive inputs using which they can forward operations to the next exchange\n  // and receive a stream of results in return, access the client, or dispatch debugging events\n  // All operations then run through the Exchange IOs in a pipeline-like fashion\n  const results$ = share(\n    composedExchange({\n      client,\n      dispatchDebug,\n      forward: fallbackExchange({ dispatchDebug }),\n    })(operations$)\n  );\n\n  // Prevent the `results$` exchange pipeline from being closed by active\n  // cancellations cascading up from components\n  pipe(results$, publish);\n\n  return client;\n} as any;\n\nexport const createClient = (Client as any) as (opts: ClientOptions) => Client;\n"],"names":["collectTypes","let","types","i","key","node","_extends","Kind","query","maskTypename","keys","value","stale","makeOperation","applyDefinitions","const","console","body","Map","operationCache","forward","kind","requestPolicy","map","operation","message","response","typenames","merge","addMetadata","op","sharedOps$","invalidateQueue","length","params","cachedOps$","result","error","hasNext","data","forwardedOps$","stringify","extensions","restore","make","observableish","err","inFlightKeys","add","type","filter","ops$","fetchOptions","dispatchDebug","event","ref","shift","res","results$","result$","take","dispatchOperation","fromValue","client","operations$","this","instance","reexecuteOperation","opts","preferGetMethod","url","subscription","withPromise","composedExchange"],"mappings":"8cAgBMA,kOAKF,SAAKC,IAAkBC,MAAcF,gDAC5BG,kCAE+B,iBAC/BC,+CAELJ,6BAS+C,+DAWnDK,oEACOA,oEAKGA,GAAIA,gBAEIC,wEAEJC,kFAgBZC,sJCzEOC,EACwC,kDAKnCC,kHAKI,+BAGSC,oGCLAC,kGCUtBC,EACiC,2GAyBzC,0FCjCKC,KAQAC,mGAG0BJ,2BAOxBK,2CAuBND,iCAKIE,MAAeC,2TCnCnBH,kBACMI,IAAcC,+BAKCZ,yFAWjBa,4EAOsBC,oEAUFC,2DAkBdC,8GAlBcD,4JA0DdE,uMAwBOrB,QACsBsB,eAEH,YAATC,iHA9CmB,8EAX1CC,sBAGiBC,EAAYC,2BAH7BF,CAOIG,kNCGRhB,kCAKMiB,sEAGgBC,wJAgBlBC,4GAgBEC,EAGCf,4DAICgB,uBA/DRC,yEAQAC,0NAyD0BR,EADlBS,yDANGf,oCAgBPgB,4GAlHJzB,8BACIwB,aAAuCE,uIAQxBJ,wEAIFA,+DAOSK,8GAiHbtC,GAAOuC,6TCxGhBvC,IAAcgB,0BA+CVW,6FAMaA,wBALO,6FAjDgBP,gBAOnCoB,uEACL3C,mFAMQ4C,gGAKaC,qYCxEV,aAATzB,wBAKJ0B,GAAaC,6CAITC,EADYF,kBAEHC,qCAeeE,4EClBhB,sCAENnB,SADaoB,mCAOTC,yBAGJH,gGAgCNlC,yPC1CMS,8DCTF6B,sDAIOC,iHCLEC,SACDlB,uCAHJc,yDC+IVpC,OAAgB,gGAcNS,EAAkBgC,UAAuB,4CAmB5BC,qFAAdC,4CAKHlC,mCAEAmC,wBAyCOC,IAAAA,CAAQpC,uBACfA,KADeA,OAIoCrB,8CAEnD0D,qBACe1D,wEAHoCA,gBAb/CoB,iDAAwBX,wBAZfkD,KAdLvC,0CAAAA,MAgDZ2B,+FAAAA,4BAKIa,4CACDC,iBAIUC,sDAEOC,aAIpBC,8EAGeC,EAAT9C,iDACF+C,iMAYoB,uBAEpBA,YAJFC,gIAcEP,yBAIMK,6FAYDxB,oKAmBCiB,4LAQ6CA,sEAanCE,wSAoBhBvC,yJASEY,eASGA,6BAGTmC,gCACe,gGAablB,wEAEoBmB,sDAiBtBC"}
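
The file shown above is a standard Source Map v3 document: the "sources" array lists the original TypeScript paths, "sourcesContent" embeds their full text, and "mappings" holds the Base64-VLQ position data. A minimal sketch for dumping those embedded originals back to disk, assuming Node.js is available on this host; the input filename and the "recovered/" output directory used here are illustrative, not part of the page above.

// extract-sources.ts — recover embedded originals from a Source Map v3 file.
import { readFileSync, writeFileSync, mkdirSync } from 'fs';
import { dirname, join } from 'path';

interface SourceMapV3 {
  version: number;
  file?: string;
  sources: string[];
  sourcesContent?: (string | null)[];
  names: string[];
  mappings: string;
}

const map: SourceMapV3 = JSON.parse(
  readFileSync('urql-core.min.mjs.map', 'utf8')
);

// Each entry in `sourcesContent` (when present) is the full original source
// for the path at the same index in `sources`.
map.sources.forEach((source, i) => {
  const content = map.sourcesContent?.[i];
  if (content == null) return;
  const outPath = join('recovered', source);
  mkdirSync(dirname(outPath), { recursive: true });
  writeFileSync(outPath, content);
});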

Execute command


For local development only. Do not use on the Internet!