{"version":3,"sources":["../../../../src/link/error/index.ts","../../../../src/react/context/ApolloProvider.tsx","../../../../src/utilities/common/cloneDeep.ts","../../../../src/utilities/common/maybeDeepFreeze.ts","../../../../src/link/core/from.ts","../../../../src/react/hooks/useMutation.ts","../../../../src/react/hooks/useLazyQuery.ts","../../../../src/react/hooks/useSubscription.ts","../../src/index.ts","../../../../src/utilities/common/filterInPlace.ts","../../../../src/utilities/graphql/transform.ts","../../../../src/cache/inmemory/reactiveVars.ts","../../../src/core/networkStatus.ts","../../../../src/utilities/common/arrays.ts","../../../../src/utilities/globals/global.ts","../../../../src/utilities/graphql/fragments.ts","../../../../src/utilities/common/compact.ts","../../../../src/react/context/ApolloContext.ts","../../../../src/utilities/common/objects.ts","../../../../src/utilities/globals/DEV.ts","../../../../src/utilities/globals/fix-graphql.ts","../../../../src/utilities/globals/index.ts","../../../src/errors/index.ts","../../../../src/utilities/graphql/storeUtils.ts","../../../../src/link/persisted-queries/index.ts","../../../../src/utilities/common/makeUniqueId.ts","../../../../src/link/http/serializeFetchParameter.ts","../../../../../src/link/http/iterators/nodeStream.ts","../../../../../src/link/http/iterators/reader.ts","../../../../src/link/http/responseIterator.ts","../../../../src/utilities/common/responseIterator.ts","../../../../../src/link/http/iterators/async.ts","../../../../../src/link/http/iterators/promise.ts","../../../../src/link/utils/throwServerError.ts","../../../../src/link/http/parseAndCheckHttpResponse.ts","../../../../src/link/http/checkFetcher.ts","../../../../src/link/http/selectHttpOptionsAndBody.ts","../../../../src/link/utils/fromError.ts","../../../../src/link/http/createHttpLink.ts","../../../../src/link/http/selectURI.ts","../../../../src/link/http/createSignalIfSupported.ts","../../../../src/link/http/rewriteURIForGET.ts","../../../../src/link/http/HttpLink.ts","../../../../src/link/core/execute.ts","../../../../src/utilities/observables/asyncMap.ts","../../../../src/utilities/common/errorHandling.ts","../../../../src/utilities/observables/iteration.ts","../../../../src/utilities/observables/subclassing.ts","../../../../src/utilities/observables/Concast.ts","../../../src/core/ObservableQuery.ts","../../../src/core/LocalState.ts","../../../src/core/QueryInfo.ts","../../../src/core/QueryManager.ts","../../../../src/utilities/common/incrementalResult.ts","../../../src/core/ApolloClient.ts","../../src/version.ts","../../../../src/cache/core/cache.ts","../../../../../src/cache/core/types/common.ts","../../../../src/cache/inmemory/entityStore.ts","../../../../src/cache/inmemory/readFromStore.ts","../../../../src/cache/inmemory/key-extractor.ts","../../../../src/cache/inmemory/policies.ts","../../../../src/utilities/common/stringifyForDisplay.ts","../../../../src/cache/inmemory/writeToStore.ts","../../../../src/cache/inmemory/inMemoryCache.ts","../../../../src/react/hooks/useSyncExternalStore.ts","../../../../src/react/hooks/useQuery.ts","../../../../src/utilities/common/mergeOptions.ts","../../../../src/cache/inmemory/object-canon.ts","../../../../src/utilities/globals/maybe.ts","../../../../src/cache/inmemory/helpers.ts","../../../../src/react/parser/index.ts","../../../../src/link/core/ApolloLink.ts","../../../../src/link/utils/createOperation.ts","../../../../src/link/utils/transformOperation.ts","../../../../src/link/utils/validateOperation.ts"
,"../../../../src/react/hooks/useApolloClient.ts","../../../../src/utilities/common/canUse.ts","../../../../src/utilities/graphql/getFromAST.ts","../../../../src/utilities/graphql/directives.ts","../../../../src/utilities/common/mergeDeep.ts"],"names":["errorHandler","link","operation","forward","observer","sub","retriedSub","retriedResult","subscribe","next","result","errors","graphQLErrors","response","bind","error","complete","networkError","e","unsubscribe","onError","request","this","ApolloProvider","client","children","ApolloContext","Consumer","context","Object","assign","Provider","value","toString","prototype","cloneDeep","cloneDeepHelper","val","seen","call","Map","has","get","slice","set","forEach","child","i","create","getPrototypeOf","keys","key","deepFreeze","workSet","Set","obj","__DEV__","isFrozen","freeze","TypeError","shallowFreeze","getOwnPropertyNames","name","add","maybeDeepFreeze","from","useMutation","mutation","options","Mutation","called","loading","setResult","ref","mutationId","isMounted","current","execute","executeOptions","baseOptions","ignoreResults","data","clientOptions","mutate","then","length","onCompleted","catch","reset","EAGER_METHODS","useLazyQuery","query","internalState","execOptionsRef","merged","useQueryResult","useQuery","skip","initialFetchPolicy","observable","getDefaultFetchPolicy","eagerMethods","method","forceUpdate","apply","arguments","fetchPolicy","promise","asyncUpdate","queryResult","useSubscription","subscription","hasIssuedDeprecationWarningRef","Subscription","variables","onSubscriptionData","onSubscriptionComplete","setObservable","canResetObservableRef","shouldResubscribe","fetchResult","onData","subscriptionData","onComplete","filterInPlace","array","test","target","elem","TYPENAME_FIELD","kind","isEmpty","op","fragmentMap","selectionSet","selections","every","selection","nullIfDocIsEmpty","doc","getDirectiveMatcher","directives","directive","some","dir","removeDirectivesFromDocument","variablesInUse","variablesToRemove","fragmentSpreadsInUse","fragmentSpreadsToRemove","modifiedDoc","Variable","enter","node","_key","parent","Field","arg","push","getAllFragmentSpreadsFromSelectionSet","frag","FragmentSpread","Directive","v","config","argMatcher","argument","aConfig","getArgumentMatcher","OperationDefinition","variableDefinitions","filter","varDef","variable","argConfig","Argument","removeArgumentsFromDocument","fs","def","FragmentDefinition","removeFragmentSpreadFromDocument","addTypenameToDocument","SelectionSet","lastIndexOf","field","d","added","connectionRemoveConfig","willRemove","removeConnectionDirectiveFromDocument","allFragments","buildQueryFromSelectionSet","document","removeClientSetsFromDocument","remove","cacheSlot","cacheInfoMap","WeakMap","getCacheInfo","cache","info","vars","dep","forgetCache","rv","recallCache","attachCache","makeVar","caches","listeners","newValue","dirty","broadcast","oldListeners","Array","clear","listener","getValue","attach","onNextChange","delete","broadcastWatches","NetworkStatus","isNetworkRequestInFlight","networkStatus","isNonEmptyArray","isArray","globalThis","window","self","global","getFragmentQueryDocument","fragmentName","actualFragmentName","fragments","definitions","definition","createFragmentMap","symTable","fragment","getFragmentFromSelection","compact","objects","contextKey","Symbol","for","getApolloContext","defineProperty","enumerable","writable","configurable","displayName","isNonNullObject","__","GLOBAL_KEY","join","Boolean","maybe","isApolloError","err","hasOwnProperty","cli
entErrors","errorMessage","extraInfo","message","concat","replace","generateErrorMessage","__proto__","ApolloError","Error","makeReference","id","__ref","String","isReference","isDocumentNode","valueToObjectRepresentation","argObj","isIntValue","isFloatValue","Number","isBooleanValue","isStringValue","isObjectValue","fields","map","isVariable","variableValue","isListValue","values","listValue","nestedArgArrayObj","isEnumValue","isNullValue","storeKeyNameFromField","directivesObj","getStoreKeyName","KNOWN_DIRECTIVES","fieldName","args","filterKeys","sort","stringify","completeFieldName","stringifiedArgs","indexOf","setStringify","s","previous","JSON","stringifyReplacer","reduce","copy","argumentsObjectFromField","resultKeyNameFromField","alias","getTypenameFromResult","__typename","isField","typename","isInlineFragment","PersistedQueryLink","collectErrorsByMessage","collected","defaultOptions","disable","errorMessages","PersistedQueryNotSupported","PersistedQueryNotFound","getContext","status","useGETForHashedQueries","hashesByQuery","nextHashesChildKey","createPersistedQueryLink","generateHash","retried","fetchOptions","hashes","hashesChildKey","prefixCounts","makeUniqueId","prefix","count","Math","random","p","label","serialized","parseError","nodeStreamIterator","stream","done","waiting","chunk","shiftedArr","shift","pair","onEnd","undefined","removeListener","on","iterator","Promise","resolve","reject","canUse","asyncIterator","readerIterator","reader","read","responseIterator","body","isAsyncIterableIterator","source","getReader","isReadableStream","isStreamableBlob","arrayBuffer","isBlob","resolved","promiseIterator","pipe","isNodeReadableStream","throwServerError","statusCode","parseHeaders","headerText","headersInit","split","line","trim","toLowerCase","parseJsonBody","bodyText","parse","getResult","handleError","readJsonBody","operations","text","operationName","fallbackHttpConfig","http","includeQuery","includeExtensions","preserveHeaderCase","headers","accept","defaultPrinter","ast","printer","selectHttpOptionsAndBodyInternal","configs","credentials","removeDuplicateHeaders","extensions","headerData","originalName","normalizedHeaders","fromError","errorValue","backupFetch","fetch","linkOptions","uri","preferredFetch","print","useGETForQueries","includeUnusedVariables","requestOptions","fetcher","linkConfig","ApolloLink","chosenURI","fallbackURI","selectURI","clientAwarenessHeaders","clientAwareness","version","controller","contextHeaders","contextConfig","size","signal","AbortController","createSignalIfSupported","_controller","queryParams","addQueryParam","encodeURIComponent","serializedVariables","serializedExtensions","preFragment","fragmentStart","substr","queryParamsPrefix","newURI","rewriteURIForGET","setContext","ctype","TextDecoder","decoder","contentType","delimiter","boundaryVal","includes","substring","boundary","buffer","running","decode","bi","readMultipartBody","abort","asyncMap","mapFn","catchFn","activeCallbackCount","completed","promiseQueue","callback","makeCallback","examiner","delegate","both","handler","caught","graphQLResultHasError","iterateObserversSafely","observers","observersWithMethod","obs","fixObservableSubclass","subclass","species","isPromiseLike","sources","addObserver","removeObserver","handlers","latest","notify","setTimeout","nextResultListeners","cancel","reason","_","iterable","start","deliverLastMessage","nextOrError","beforeNext","queryManager","queryInfo","subObserver","_subscription","_observer","defaultSubscriptionObserverErrorCallback","
first","last","reobserve","tearDownQuery","subscriptions","isTornDown","defaultFetchPolicy","queryId","generateQueryId","opDef","queryName","transform","removeQuery","getCurrentResult","saveAsLastResult","lastResult","getLastResult","ready","hasForcedResolvers","diff","getDiff","returnPartialData","partial","partialRefetch","logMissingFieldErrors","missing","updateLastResult","isDifferentFromLastResult","newResult","getLast","variablesMustMatch","getLastError","resetLastResults","resetQueryStoreErrors","resetErrors","refetch","reobserveOptions","pollInterval","queryDef","resetLastWrite","fetchMore","fetchMoreOptions","combinedOptions","qid","originalNetworkStatus","notifyOnNetworkStatusChange","observe","updatedQuerySet","fetchQuery","fetchMoreResult","batch","update","updateQuery","optimistic","writeQuery","onWatchUpdated","watch","finally","reobserveCacheFirst","subscribeToMore","startGraphQLSubscription","setOptions","newOptions","setVariables","broadcastQueries","startPolling","updatePolling","stopPolling","applyNextFetchPolicy","nextFetchPolicy","newNetworkStatus","setObservableQuery","fetchQueryObservable","ssrMode","pollingInfo","interval","maybeFetch","poll","clearTimeout","timeout","assumeImmutableResults","useDisposableConcast","oldVariables","oldFetchPolicy","mergedOptions","concast","reportResult","reportError","lastError","errorResult","hasObservers","stopQuery","obsQuery","resolvers","fragmentMatcher","addResolvers","setFragmentMatcher","resolverGroup","mergeDeep","setResolvers","getResolvers","runResolvers","remoteResult","onlyRunForcedResolvers","resolveDocument","localResult","getFragmentMatcher","clientQuery","serverQuery","prepareContext","getCacheKey","identify","addExportedVariables","buildRootValueFromCache","shouldForceResolvers","forceResolvers","rootValue","mainDefinition","definitionOperation","defaultOperationType","charAt","toUpperCase","execContext","exportedVariables","resolveSelectionSet","resultsToMerge","resolveField","fieldResult","typeCondition","fragmentResult","all","aliasedFieldName","aliasUsed","defaultResult","resultPromise","resolverType","resolverMap","withValue","resolveSubSelectedArray","item","destructiveMethodCounts","wrapDestructiveCacheMethod","methodName","original","cancelNotifyTimeout","lastRequestId","stopped","observableQuery","init","lastDiff","getDiffOptions","updateWatch","oq","updateLastDiff","canonizeResults","setDiff","oldDiff","notifyTimeout","oqListener","fromOptimisticTransaction","shouldNotify","stop","QueryInfo","watchOptions","watcher","lastWatch","lastWrite","shouldWrite","dmCount","markResult","cacheWriteBehavior","incremental","path","isNaN","merge","shouldWriteResult","errorPolicy","performTransaction","overwrite","diffOptions","markReady","markError","ignoreErrors","writeWithErrors","queryDeduplication","onBroadcast","localState","queries","fetchCancelFns","transformCache","queryIdCounter","requestIdCounter","mutationIdCounter","inFlightLinkObservables","mutationStore","_info","stopQueryNoBroadcast","cancelPendingFetches","optimisticResponse","updateQueries","refetchQueries","awaitRefetchQueries","updateWithProxyFn","onQueryUpdated","keepRootFields","mutationStoreValue","stopQueryInStore","queryNamesAndDocs","legacyQueryOptions","aqr","includedQueriesById","oldNetworkStatus","hasSuggestedDevtools","resetStoreCallbacks","clearStoreCallbacks","ssrForceFetchDelay","connectToDevTools","__APOLLO_CLIENT__","typeDefs","clientAwarenessName","clientAwarenessVersion","HttpLink","empty","disableNetworkFetches","watchQuery","resetSto
re","reFetchObservableQueries","top","__APOLLO_DEVTOOLS_GLOBAL_HOOK__","nav","navigator","ua","userAgent","url","devToolsHookCb","action","state","getQueryStore","mutations","dataWithOptimisticResults","extract","mergeOptions","readQuery","readFragment","writeFragment","__actionHookForDevTools","cb","__requestRaw","payload","clearStore","discardWatches","fn","onResetStore","c","onClearStore","includeStandby","results","getObservableQueries","include","restore","serializedState","setLocalStateFragmentMatcher","setLink","newLink","getFragmentDoc","updateResult","optimisticId","recordOptimisticTransaction","transaction","transformDocument","transformForLink","object","gc","modify","rootId","write","dataId","updateFragment","MissingFieldError","DELETE","delModifier","INVALIDATE","policies","group","rootIds","refs","getFieldValue","objectOrReference","storeFieldName","canRead","objOrRef","toReference","objOrIdOrRef","mergeIntoStore","toObject","lookup","depend","storeObject","rootTypenamesById","dependOnExistence","older","newer","existing","incoming","storeObjectReconciler","caching","__exists","hasKeyArgs","readField","fieldNameOrOptions","store","fieldValue","storage","getStorage","getStoreFieldName","evict","limit","evicted","extraRootIds","getRootIdSet","__META","newData","retain","release","ids","snapshot","findChildRefIds","idsToRemove","root","makeCacheKey","keyMaker","lookupArray","resetCaching","makeDepKey","maybeDependOnExistenceOfEntity","entityId","supportsResultCaching","EntityStore","resultCaching","seed","stump","storageTrie","addLayer","layerId","replay","removeLayer","Root","Layer","ownStoreObject","parentStoreObject","fromParent","existingObject","incomingObject","property","existingValue","incomingValue","execSelectionSetKeyArgs","knownResults","addTypename","canon","executeSelectionSet","peekArgs","other","peek","admit","enclosingRef","execSelectionSetImpl","max","resultCacheMaxSize","keyArgs","varString","executeSubSelectedArray","execSubSelectedArrayImpl","resetCanon","diffQueryAgainstStore","rootRef","execResult","firstMissing","isFresh","isKnown","objectsToMerge","missingMerger","handleMissing","resultName","rootIdsByTypename","pass","lookupFragment","FRAGMENT_SPREAD","fragmentMatches","finalResult","frozen","childResult","assertSelectionSetForIdValue","tree","specifierInfoCache","lookupSpecifierInfo","spec","cacheKey","keyFieldsFnFromSpecifier","specifier","keyFieldsFn","keyObject","collectSpecifierPaths","schemaKeyPath","extracted","extractKeyPath","extractKey","keyArgsFnFromSpecifier","keyArgsFn","keyPath","firstKey","firstChar","variableName","varKeyPath","find","directiveArgs","suffix","extractor","merger","getSpecifierPaths","toMerge","paths","normalize","reducer","argsFromFieldSpecifier","nullKeyFieldsFn","simpleKeyArgsFn","_args","mergeTrueFn","mergeObjects","mergeFalseFn","typePolicies","toBeAdded","supertypeMap","fuzzySubtypes","usingPossibleTypes","dataIdFromObject","setRootTypename","possibleTypes","addPossibleTypes","addTypePolicies","partialContext","ROOT_QUERY","normalizeReadFieldOptions","policy","getTypePolicy","keyFn","specifierOrId","queryType","mutationType","subscriptionType","updateTypePolicy","keyFields","setMerge","getFieldPolicy","which","old","supertype","getSupertypeSet","subtype","match","RegExp","supertypes","rest","inbox","splice","createIfMissing","fieldPolicies","supertypeSet","typenameSupertypeSet","needToCheckFuzzySubtypes","checkingFuzzySubtypes","regExp","fuzzyString","fieldSpec","specifierOrString","readOptions","makeFieldFunctionOpti
ons","getReadFunction","getMergeFunction","parentTypename","childTypename","runMergeFunction","makeMergeObjectsFunction","readFieldArgs","argc","undefId","stringifyForDisplay","eType","iType","getContextFlavor","clientOnly","deferred","flavored","flavors","writeToStore","operationDefinition","written","incomingById","processSelectionSet","mergeTree","fieldNodeSet","entityRef","applied","applyMerges","childTree","existingRef","incomingObj","getChild","parentType","typeDotName","warnings","childTypenames","warnAboutDataLoss","flattenFields","resultFieldKey","getChildMergeTree","processFieldValue","maybeRecycleChildMergeTree","dataRef","sets","mergeMergeTrees","mergeTreeIsEmpty","fieldMap","limitingTrie","flatten","inheritedContext","visitedNode","visited","if","getStorageArgs","eVal","iVal","aVal","pop","emptyMergeTreePool","left","right","needToMergeMaps","leftTree","watches","typenameDocumentCache","txCount","rootStore","optimisticData","resetResultCache","resetResultIdentities","previousReader","storeReader","storeWriter","maybeBroadcastWatch","broadcastWatch","immediate","forget","removeOptimistic","idToRemove","newOptimisticData","perform","layer","alreadyDirty","didWarnUncachedGetSnapshot","useSyncExternalStore","getSnapshot","getServerSnapshot","inst","checkIfSnapshotChanged","useInternalState","useApolloClient","stateRef","setTick","tick","asyncResolveFns","optionsToIgnoreOnce","WeakSet","ssrDisabledResult","skipStandbyResult","toQueryResultCache","Query","previousResult","previousData","watchQueryOptions","renderPromises","useOptions","useObservableQuery","onNext","unsafeHandlePartialRefetch","toQueryResult","createWatchQueryOptions","queryHookOptions","currentWatchQueryOptions","getObsQueryOptions","InternalState","ssr","globalDefaults","otherOptions","getSSRObservable","obsQueryFields","ssrAllowed","registerSSRObservable","addObservableQueryPromise","nextResult","handleErrorOrCompleted","resultWithoutPartial","defaults","stringifyCanon","stringifyCache","known","pool","passes","keysByJSON","shallowCopy","sortedKeys","json","sorted","canonicalStringify","resetCanonicalStringify","canonical","ObjectCanon","thunk","hasOwn","isNullish","defaultDataIdFromObject","_id","defaultConfig","normalizeConfig","shouldCanonizeResults","getTypenameFromStoreObject","TypeOrFieldNameRegExp","fieldNameFromStoreName","selectionSetMatchesResult","storeValueIsStoreObject","makeProcessedFieldsMerger","extractFragmentContext","DocumentType","type","cached","x","passthrough","of","toLink","isTerminating","links","y","leftLink","rightLink","starting","createOperation","transformedOperation","transformOperation","OPERATION_FIELDS","validateOperation","second","firstLink","nextLink","setOnError","override","canUseWeakMap","canUseWeakSet","canUseSymbol","canUseAsyncIteratorSymbol","canUseDOM","usingJSDOM","canUseLayoutEffect","checkDocument","getOperationDefinition","getOperationName","getFragmentDefinitions","getQueryDefinition","getFragmentDefinition","fragmentDef","getMainDefinition","queryDoc","fragmentDefinition","getDefaultValues","defaultValues","defs","defaultValue","shouldInclude","isInclusionDirective","directiveArguments","directiveName","ifArgument","ifValue","getInclusionDirectives","evaledValue","hasDirectives","names","nameSet","uniqueCount","hasClientExports","mergeDeepArray","DeepMerger","defaultReconciler","reconciler","isObject","pastCopies","sourceKey","targetValue","shallowCopyForMerge"],"mappings":"sKAiGA,kBAEE,WAAYA,GAAZ,MACE,cAAO,K,OACP,EAAKC,KA3EH,SAAkBD,GACtB,OAAO,IAAI,KAAW,SAACE,EAAWC
,GAChC,OAAO,IAAI,KAAW,SAAAC,GACpB,IAAIC,EACAC,EACAC,EAEJ,IACEF,EAAMF,EAAQD,GAAWM,UAAU,CACjCC,KAAM,SAAAC,GACAA,EAAOC,SACTJ,EAAgBP,EAAa,CAC3BY,cAAeF,EAAOC,OACtBE,SAAUH,EACVR,UAAS,EACTC,QAAO,KAIPG,EAAaC,EAAcC,UAAU,CACnCC,KAAML,EAASK,KAAKK,KAAKV,GACzBW,MAAOX,EAASW,MAAMD,KAAKV,GAC3BY,SAAUZ,EAASY,SAASF,KAAKV,KAKvCA,EAASK,KAAKC,IAEhBK,MAAO,SAAAE,IACLV,EAAgBP,EAAa,CAC3BE,UAAS,EACTe,aAAY,EAEZL,cACEK,GACAA,EAAaP,QACbO,EAAaP,OAAOC,OACtBR,QAAO,KAGPG,EAAaC,EAAcC,UAAU,CACnCC,KAAML,EAASK,KAAKK,KAAKV,GACzBW,MAAOX,EAASW,MAAMD,KAAKV,GAC3BY,SAAUZ,EAASY,SAASF,KAAKV,KAIrCA,EAASW,MAAME,IAEjBD,SAAU,WAGHT,GACHH,EAASY,SAASF,KAAKV,EAAvBA,MAIN,MAAOc,GACPlB,EAAa,CAAEiB,aAAcC,EAAGhB,UAAS,EAAEC,QAAO,IAClDC,EAASW,MAAMG,GAGjB,OAAO,WACDb,GAAKA,EAAIc,cACTb,GAAYD,EAAIc,qBAUZC,CAAQpB,G,EASxB,OAb+B,iBAOtB,YAAAqB,QAAP,SACEnB,EACAC,GAEA,OAAOmB,KAAKrB,KAAKoB,QAAQnB,EAAWC,IAExC,EAbA,CAA+B,M,kCCjG/B,6DAYaoB,EAAqD,SAAC,G,IACjEC,EAAM,SACNC,EAAQ,WAEFC,EAAgB,cACtB,OACE,gBAACA,EAAcC,SAAQ,MACpB,SAACC,GAWA,YAXA,IAAAA,MAAA,IACIJ,GAAUI,EAAQJ,SAAWA,IAC/BI,EAAUC,OAAOC,OAAO,GAAIF,EAAS,CAAEJ,OAAM,KAG/C,QACE,YAAQ,EACR,oHAEA,yBAGA,gBAACE,EAAcK,SAAQ,CAACC,MAAOJ,GAC5BH,Q,kCChCL,sCAAAQ,EAAaJ,OAAOK,UAAZ,SAKV,SAAUC,EAAaH,GAC3B,OAAOI,EAAgBJ,GAGzB,SAASI,EAAmBC,EAAQC,GAClC,OAAQL,EAASM,KAAKF,IACtB,IAAK,iBAEH,IADAC,EAAOA,GAAQ,IAAIE,KACVC,IAAIJ,GAAM,OAAOC,EAAKI,IAAIL,GACnC,IAAM,EAAmBA,EAAYM,MAAM,GAK3C,OAJAL,EAAKM,IAAIP,EAAK,GACd,EAAKQ,SAAQ,SAAUC,EAAOC,GAC5B,EAAKA,GAAKX,EAAgBU,EAAOR,MAE5B,EAGT,IAAK,kBAEH,IADAA,EAAOA,GAAQ,IAAIE,KACVC,IAAIJ,GAAM,OAAOC,EAAKI,IAAIL,GAGnC,IAAM,EAAOR,OAAOmB,OAAOnB,OAAOoB,eAAeZ,IAKjD,OAJAC,EAAKM,IAAIP,EAAK,GACdR,OAAOqB,KAAKb,GAAKQ,SAAQ,SAAAM,GACvB,EAAKA,GAAOf,EAAiBC,EAAYc,GAAMb,MAE1C,EAGT,QACE,OAAOD,K,kCCpCX,mDAGA,SAASe,EAAWpB,GAClB,IAAMqB,EAAU,IAAIC,IAAI,CAACtB,IAQzB,OAPAqB,EAAQR,SAAQ,SAAAU,GACV,YAAgBA,IASxB,SAAyCA,GACvC,GAAIC,UAAY3B,OAAO4B,SAASF,GAC9B,IACE1B,OAAO6B,OAAOH,GACd,MAAOrC,GAIP,GAAIA,aAAayC,UAAW,OAAO,KACnC,MAAMzC,EAGV,OAAOqC,EArBuBK,CAAcL,KAASA,GACjD1B,OAAOgC,oBAAoBN,GAAKV,SAAQ,SAAAiB,GAClC,YAAgBP,EAAIO,KAAQT,EAAQU,IAAIR,EAAIO,UAI/C9B,EAkBH,SAAUgC,EAAmBT,GAIjC,OAHIC,SACFJ,EAAWG,GAENA,I,kCClCT,sCAEaU,EAFb,MAEoB,EAAWA,M,+HCmBzB,SAAUC,EAMdC,EACAC,GAEA,IAAM5C,EAAS,YAAuB,OAAP4C,QAAO,IAAPA,OAAO,EAAPA,EAAS5C,QACxC,YAAmB2C,EAAU,IAAaE,UACpC,MAAsB,mBAAwC,CAClEC,QAAQ,EACRC,SAAS,EACT/C,OAAM,IAHDd,EAAM,KAAE8D,EAAS,KAMlBC,EAAM,iBAAO,CACjB/D,OAAM,EACNgE,WAAY,EACZC,WAAW,EACXnD,OAAM,EACN2C,SAAQ,EACRC,QAAO,IAMPvC,OAAOC,OAAO2C,EAAIG,QAAS,CAAEpD,OAAM,EAAE4C,QAAO,EAAED,SAAQ,IAGxD,IAAMU,EAAU,uBAAY,SAC1BC,QAAA,IAAAA,MAAA,IAOM,MAA8BL,EAAIG,QAAjCpD,EAAM,SAAE4C,EAAO,UAAED,EAAQ,WAC1BY,EAAc,OAAH,IAAG,CAAH,eAAQX,GAAO,CAAED,SAAQ,IACrCM,EAAIG,QAAQlE,OAAO6D,SAAYQ,EAAYC,gBAAiBP,EAAIG,QAAQD,WAC3EH,EAAUC,EAAIG,QAAQlE,OAAS,CAC7B6D,SAAS,EACTxD,WAAO,EACPkE,UAAM,EACNX,QAAQ,EACR9C,OAAM,IAIV,IAAMkD,IAAeD,EAAIG,QAAQF,WAC3BQ,EAAgB,YACpBH,EACAD,GAGF,OAAOtD,EAAO2D,OAAOD,GAAeE,MAAK,SAACvE,G,UAChCoE,EAAiBpE,EAAb,KAAEF,EAAWE,EAAL,OACdE,EACJJ,GAAUA,EAAO0E,OAAS,EACtB,IAAI,IAAY,CAAEzE,cAAeD,SACjC,EAEN,GACE+D,IAAeD,EAAIG,QAAQF,aAC1BQ,EAAcF,cACf,CACA,IAAM,EAAS,CACbV,QAAQ,EACRC,SAAS,EACTU,KAAI,EACJlE,MAAK,EACLS,OAAM,GAGJiD,EAAIG,QAAQD,YAAc,YAAMF,EAAIG,QAAQlE,OAAQ,IACtD8D,EAAUC,EAAIG,QAAQlE,OAAS,GAKnC,OAFgC,QAAhC,EAAmB,QAAnB,EAAA+D,EAAIG,QAAQR,eAAO,eAAEkB,mBAAW,gBAAGzE,EAASoE,KAAOC,GACzB,QAA1B,EAAAJ,EAAeQ,mBAAW,gBAAGzE,EAASoE,KAAOC,GACtCrE,KACN0E,OAAM,SAACxE,G,YACR,GACE2D,IAAeD,EAAIG,QAAQF,YAC3BD,EAAIG,QAAQD,UACZ,CACA,IAAM,EAAS,CACbJ,SAAS,EACTxD,MAAK,EACLkE,UAAM,EACNX,QAAQ,EACR9C,OAAM,GAGH,YAAMiD,EAAIG,QAAQlE,OAAQ,IAC7B8D,EAAUC,EAAIG,QAAQlE,OAAS,GAInC,IAAuB,QAAnB,EAAA+D,EAA
IG,QAAQR,eAAO,eAAEhD,UAAW8D,EAAc9D,QAIhD,OAH4B,QAA5B,EAAmB,QAAnB,EAAAqD,EAAIG,QAAQR,eAAO,eAAEhD,eAAO,gBAAGL,EAAOmE,GAChB,QAAtB,EAAAJ,EAAe1D,eAAO,gBAAGL,EAAOmE,GAEzB,CAAED,UAAM,EAAQtE,OAAQI,GAGjC,MAAMA,OAEP,IAEGyE,EAAQ,uBAAY,WACpBf,EAAIG,QAAQD,WACdH,EAAU,CAAEF,QAAQ,EAAOC,SAAS,EAAO/C,OAAM,MAElD,IAUH,OARA,qBAAU,WAGR,OAFAiD,EAAIG,QAAQD,WAAY,EAEjB,WACLF,EAAIG,QAAQD,WAAY,KAEzB,IAEI,CAACE,EAAO,aAAIW,MAAK,GAAK9E,M,gHCpIzB+E,EAAgB,CACpB,UACA,YACA,YACA,cACA,eACA,mBAGI,SAAUC,EACdC,EACAvB,GAEA,IAAMwB,EAAgB,YACpB,YAAgBxB,GAAWA,EAAQ5C,QACnCmE,GAGIE,EAAiB,mBACjBC,EAASD,EAAejB,QAC1B,YAAaR,EAASyB,EAAejB,SACrCR,EAEE2B,EAAiBH,EAAcI,SAAQ,2BACxCF,GAAM,CACTG,MAAOJ,EAAejB,WAGlBsB,EACJH,EAAeI,WAAW/B,QAAQ8B,oBAClCN,EAAcQ,wBAEV1F,EACJmB,OAAOC,OAAOiE,EAAgB,CAC5BzB,SAAUuB,EAAejB,UAIvByB,EAAe,mBAAQ,WAE3B,IADA,IAAMA,EAAoC,G,WAC/BlD,GACT,IAAMmD,EAAS5F,EAAOyC,GACtBkD,EAAalD,GAAO,WAMlB,OALK0C,EAAejB,UAClBiB,EAAejB,QAAU/C,OAAOmB,OAAO,MAEvC4C,EAAcW,eAETD,EAAOE,MAAMlF,KAAMmF,aARZ,MAAAhB,EAAA,eAAa,C,EAAjB,MAYd,OAAOY,IACN,IAyBH,OAvBAxE,OAAOC,OAAOpB,EAAQ2F,GAuBf,CArBS,uBAEd,SAAAvB,GACAe,EAAejB,QAAUE,EAAgB,2BACpCA,GAAc,CACjB4B,YAAa5B,EAAe4B,aAAeR,IACzC,CACFQ,YAAaR,GAGf,IAAMS,EAAUf,EACbgB,cACAxB,MAAK,SAAAyB,GAAe,OAAAhF,OAAOC,OAAO+E,EAAd,MAMvB,OAFAF,EAAQpB,OAAM,eAEPoB,IACN,IAEcjG,K,kCC9FnB,4EAeM,SAAUoG,EACdC,EACA3C,GAEA,IAAM4C,EAAiC,kBAAO,GACxCxF,EAAS,YAAuB,OAAP4C,QAAO,IAAPA,OAAO,EAAPA,EAAS5C,QACxC,YAAmBuF,EAAc,IAAaE,cACxC,MAAsB,mBAAoC,CAC9D1C,UAAiB,OAAPH,QAAO,IAAPA,OAAO,EAAPA,EAAS6B,MACnBlF,WAAO,EACPkE,UAAM,EACNiC,UAAkB,OAAP9C,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,YAJfxG,EAAM,KAAE8D,EAAS,KAOnBwC,EAA+BpC,UAClCoC,EAA+BpC,SAAU,GAE9B,OAAPR,QAAO,IAAPA,OAAO,EAAPA,EAAS+C,qBACX,SAAU,IACR,KAAQ,EAAM,OACV,oIACA,8HAIG,OAAP/C,QAAO,IAAPA,OAAO,EAAPA,EAASgD,yBACX,SAAU,IACR,KAAQ,EAAU,WACd,gJACA,sIAKJ,MAA8B,oBAAS,WAC3C,OAAW,OAAPhD,QAAO,IAAPA,OAAO,EAAPA,EAAS6B,MACJ,KAGFzE,EAAOhB,UAAU,CACtBmF,MAAOoB,EACPG,UAAkB,OAAP9C,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,UACpBR,YAAoB,OAAPtC,QAAO,IAAPA,OAAO,EAAPA,EAASsC,YACtB9E,QAAgB,OAAPwC,QAAO,IAAPA,OAAO,EAAPA,EAASxC,aATfuE,EAAU,KAAEkB,EAAa,KAa1BC,EAAwB,kBAAO,GACrC,qBAAU,WACR,OAAO,WACLA,EAAsB1C,SAAU,KAEjC,IAEH,IAAMH,EAAM,iBAAO,CAAEjD,OAAM,EAAEuF,aAAY,EAAE3C,QAAO,IAiGlD,OAhGA,qBAAU,W,YACJmD,EAA2B,OAAPnD,QAAO,IAAPA,OAAO,EAAPA,EAASmD,kBACA,oBAAtBA,IACTA,IAAsBA,EAAkBnD,KAG/B,OAAPA,QAAO,IAAPA,OAAO,EAAPA,EAAS6B,SACC,OAAP7B,QAAO,IAAPA,OAAO,EAAPA,EAAS6B,UAA6B,QAAnB,EAAAxB,EAAIG,QAAQR,eAAO,eAAE6B,OAAQqB,EAAsB1C,WACzEJ,EAAU,CACRD,SAAS,EACTU,UAAM,EACNlE,WAAO,EACPmG,UAAkB,OAAP9C,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,YAEtBG,EAAc,MACdC,EAAsB1C,SAAU,KAGX,IAAtB2C,GACE/F,IAAWiD,EAAIG,QAAQpD,QACtBuF,IAAiBtC,EAAIG,QAAQmC,eACtB,OAAP3C,QAAO,IAAPA,OAAO,EAAPA,EAASsC,gBAAmC,QAAnB,EAAAjC,EAAIG,QAAQR,eAAO,eAAEsC,gBACtC,OAAPtC,QAAO,IAAPA,OAAO,EAAPA,EAAS6B,UAA6B,QAAnB,EAAAxB,EAAIG,QAAQR,eAAO,eAAE6B,OACxC,YAAa,OAAP7B,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,UAA8B,QAAnB,EAAAzC,EAAIG,QAAQR,eAAO,eAAE8C,cACpDI,EAAsB1C,UAEtBJ,EAAU,CACRD,SAAS,EACTU,UAAM,EACNlE,WAAO,EACPmG,UAAkB,OAAP9C,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,YAEtBG,EAAc7F,EAAOhB,UAAU,CAC7BmF,MAAOoB,EACPG,UAAkB,OAAP9C,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,UACpBR,YAAoB,OAAPtC,QAAO,IAAPA,OAAO,EAAPA,EAASsC,YACtB9E,QAAgB,OAAPwC,QAAO,IAAPA,OAAO,EAAPA,EAASxC,WAEpB0F,EAAsB1C,SAAU,GAGlC/C,OAAOC,OAAO2C,EAAIG,QAAS,CAAEpD,OAAM,EAAEuF,aAAY,EAAE3C,QAAO,MACzD,CAAC5C,EAAQuF,EAAc3C,EAASkD,EAAsB1C,UAEzD,qBAAU,WACR,GAAKuB,EAAL,CAIA,IAAMY,EAAeZ,EAAW3F,UAAU,CACxCC,KAAA,SAAK+G,G,QACG9G,EAAS,CACb6D,SAAS,EAGTU,KAAMuC,EAAYvC,KAClBlE,WAAO,EACPmG,UAAkB,OAAP9C,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,WAEtB1C,EAAU9D,IAEa,QAAnB,EAAA+D,EAAIG,QAAQR,eAAO,eAAEqD,QACvBhD,EAAIG,QAAQR,QAAQ
qD,OAAO,CACzBjG,OAAM,EACNyD,KAAMvE,KAEoB,QAAnB,EAAA+D,EAAIG,QAAQR,eAAO,eAAE+C,qBAC9B1C,EAAIG,QAAQR,QAAQ+C,mBAAmB,CACrC3F,OAAM,EACNkG,iBAAkBhH,KAIxBK,MAAK,SAAC,G,QACJyD,EAAU,CACRD,SAAS,EACTU,UAAM,EACNlE,MAAK,EACLmG,UAAkB,OAAP9C,QAAO,IAAPA,OAAO,EAAPA,EAAS8C,YAEM,QAA5B,EAAmB,QAAnB,EAAAzC,EAAIG,QAAQR,eAAO,eAAEhD,eAAO,gBAAG,IAEjCJ,SAAQ,W,SACiB,QAAnB,EAAAyD,EAAIG,QAAQR,eAAO,eAAEuD,YACvBlD,EAAIG,QAAQR,QAAQuD,cACQ,QAAnB,EAAAlD,EAAIG,QAAQR,eAAO,eAAEgD,yBAC9B3C,EAAIG,QAAQR,QAAQgD,4BAK1B,OAAO,WACLL,EAAa5F,kBAEd,CAACgF,IAEGzF,I,wFC7JH,EAAW,IAAI,IAGf,EAAoB,IAAI,IAE1B,GAAwB,EACxB,GAAgC,EAIpC,SAAS,EAAU,GACf,OAAK,EAAO,QAAQ,UAAW,KAAK,OAUxC,SAAS,EAAiB,GACtB,IAAI,EAAW,IAAI,IACf,EAAgC,GAgCpC,OA9BA,EAAE,YAAY,SAAQ,YAClB,GAA4B,uBAA5B,EAAmB,KAA+B,CACpD,IAAI,EAAe,EAAmB,KAAK,MACvC,EAbD,GADgB,EAca,EAAmB,KAblC,OAAO,KAAK,UAAU,EAAI,MAAO,EAAI,MAgBlD,EAAe,EAAkB,IAAI,GACrC,IAAiB,EAAa,IAAI,GAGhC,GACF,QAAQ,KAAK,+BAAiC,EAAjC,iMAIL,GACF,EAAU,IAAI,EAAc,EAAe,IAAI,KAGzD,EAAa,IAAI,GAEZ,EAAS,IAAI,KAChB,EAAS,IAAI,GACb,EAAY,KAAK,SAGnB,EAAY,KAAK,GArCvB,IAAyB,KAyCvB,2BACK,GAAG,CACF,YAAO,IA0Bf,SAAS,EAAc,GACnB,IAAE,EAAW,EAAU,GACzB,IAAK,EAAS,IAAI,GAAW,CAC3B,IAAM,EAAS,YAAM,EAAQ,CAC3B,8BAA6B,EAC7B,6BAA8B,IAEhC,IAAK,GAA0B,aAAhB,EAAO,KACpB,MAAM,IAAI,MAAM,iCAElB,EAAS,IACP,EAjCN,SAAkB,GACd,IAAI,EAAU,IAAI,IAAyB,EAAI,aAE/C,EAAM,SAAQ,YACV,EAAK,YAAY,EAAK,IAC1B,OAAO,KAAK,GAAM,SAAQ,YACxB,IAAM,EAAQ,EAAK,GACf,GAA0B,kBAAV,GAClB,EAAQ,IAAI,SAKhB,IAAI,EAAM,EAAI,IAMd,OALE,WACK,EAAI,kBACJ,EAAI,UAGN,EAiBH,CAAS,EAAiB,KAG5B,OAAK,EAAS,IAAI,GAIhB,SAAU,EACd,G,IACA,wDAGwB,kBAAb,IACT,EAAW,CAAC,IAGd,IAAI,EAAS,EAAS,GAWtB,OATA,EAAK,SAAQ,SAAC,EAAK,GACb,GAAoB,aAAb,EAAI,KACb,GAAU,EAAI,IAAI,OAAO,KAEzB,GAAU,EAEZ,GAAU,EAAS,EAAI,MAGlB,EAAc,GAoBvB,IAQiB,EARX,EACD,EADC,EAjBA,WACJ,EAAS,QACT,EAAkB,SAed,EAZA,WACJ,GAAwB,GAWpB,EARA,WACJ,GAAgC,GAO5B,EAJA,WACJ,GAAgC,IAWjB,QAAG,KAEhB,IAKE,EAJF,cAIE,EAHF,0BAGE,EAFF,sCAEE,EADF,uCACE,EAGN,EAAG,QAAW,G,6MC3KR,SAAUkH,EACdC,EACAC,EACAlG,GAEA,IAAImG,EAAS,EAOb,OANAF,EAAMhF,SAAQ,SAAUmF,EAAMjF,GACxB+E,EAAKvF,KAAKjB,KAAM0G,EAAMjF,EAAG8E,KAC3BA,EAAME,KAAYC,KAEnBpG,GACHiG,EAAMxC,OAAS0C,EACRF,E,qBC8CHI,EAA4B,CAChCC,KAAM,QACNpE,KAAM,CACJoE,KAAM,OACNlG,MAAO,eAIX,SAASmG,EACPC,EACAC,GAEA,OAAQD,GAAMA,EAAGE,aAAaC,WAAWC,OACvC,SAAAC,GAAa,MAAmB,mBAAnBA,EAAUP,MACrBC,EAAQE,EAAYI,EAAU3E,KAAK9B,OADxB,MAKjB,SAAS0G,EAAiBC,GACxB,OAAOR,EACL,YAAuBQ,IAAQ,YAAsBA,GACrD,YAAkB,YAAuBA,KAEvC,KACAA,EAGN,SAASC,EACPC,GAEA,OAAO,SAA0BC,GAC/B,OAAOD,EAAWE,MAChB,SAAAC,GACE,OAACA,EAAIlF,MAAQkF,EAAIlF,OAASgF,EAAUhF,KAAK9B,OACxCgH,EAAIlB,MAAQkB,EAAIlB,KAAKgB,OAKxB,SAAUG,EACdJ,EACAF,GAEA,IAAMO,EAA0CrH,OAAOmB,OAAO,MAC1DmG,EAA6C,GAE3CC,EAAgDvH,OAAOmB,OAAO,MAChEqG,EAAwD,GAExDC,EAAcZ,EAChB,YAAMC,EAAK,CACTY,SAAU,CACRC,MAAA,SAAMC,EAAMC,EAAMC,GAO8B,uBAA3CA,EAAkCzB,OAEnCgB,EAAeO,EAAK3F,KAAK9B,QAAS,KAKxC4H,MAAO,CACLJ,MAAA,SAAMC,GACJ,GAAIZ,GAAcY,EAAKZ,aAGKA,EAAWE,MACnC,SAAAD,GAAa,OAAAA,EAAA,WAKbW,EAAKZ,YACLY,EAAKZ,WAAWE,KAAKH,EAAoBC,KA2BzC,OAzBIY,EAAKhD,WAGPgD,EAAKhD,UAAU5D,SAAQ,SAAAgH,GACE,aAAnBA,EAAI7H,MAAMkG,MACZiB,EAAkBW,KAAK,CACrBhG,KAAO+F,EAAI7H,MAAuB8B,KAAK9B,WAM3CyH,EAAKnB,cAGPyB,EAAsCN,EAAKnB,cAAczF,SACvD,SAAAmH,GACEX,EAAwBS,KAAK,CAC3BhG,KAAMkG,EAAKlG,KAAK9B,WAOjB,OAMfiI,eAAgB,CACdT,MAAK,SAACC,GAGJL,EAAqBK,EAAK3F,KAAK9B,QAAS,IAI5CkI,UAAW,CACTV,MAAK,SAACC,GAEJ,GAAIb,EAAoBC,EAApBD,CAAgCa,GAClC,OAAO,UA+BjB,OApBEH,GACA1B,EAAcuB,GAAmB,SAAAgB,GAAK,QAAEA,EAAErG,OAASoF,EAAeiB,EAA5B,SAAqC9E,SAE3EiE,EA6JE,SACJc,EACAzB,GAEA,IAAM0B,EAjBR,SAA4BD,GAC1B,OAAO,SAAyBE,GAC9B,OAAOF,EAAOrB,MACZ,SAACwB,GACC,OAAAD,EAAStI,OACe,aAAxBsI,EAAStI,MAAMkG,MACfoC,EAAStI,MAAM8B,OACdyG,EAAQzG,OAASwG,EAAStI,MAAM8B,KAAK9B,
OACnCuI,EAAQzC,MAAQyC,EAAQzC,KAAKwC,QASnBE,CAAmBJ,GAEtC,OAAO1B,EACL,YAAMC,EAAK,CACT8B,oBAAqB,CACnBjB,MAAK,SAACC,GACJ,kCACKA,GAAI,CAEPiB,oBAAqBjB,EAAKiB,oBAAsBjB,EAAKiB,oBAAoBC,QACvE,SAAAC,GACE,OAACR,EAAOrB,MAAK,SAAAc,GAAO,OAAAA,EAAI/F,OAAS8G,EAAOC,SAAS/G,KAA7B,YACpB,OAKV8F,MAAO,CACLJ,MAAK,SAACC,GAKJ,GAF0BW,EAAOrB,MAAK,SAAA+B,GAAa,OAAAA,EAAA,UAE5B,CACrB,IAAI,EAAgB,EASpB,GARIrB,EAAKhD,WACPgD,EAAKhD,UAAU5D,SAAQ,SAAAgH,GACjBQ,EAAWR,KACb,GAAiB,MAKD,IAAlB,EACF,OAAO,QAMfkB,SAAU,CACRvB,MAAK,SAACC,GAEJ,GAAIY,EAAWZ,GACb,OAAO,UA7MDuB,CAA4B7B,EAAmBG,IAO7DA,GACA1B,EAAcyB,GAAyB,SAAA4B,GAAM,QAAEA,EAAGnH,OAASsF,EAAqB6B,EAAnC,SAC1C5F,SAEHiE,EA0ME,SACJc,EACAzB,GAEA,SAASa,EACPC,GAEA,GAAIW,EAAOrB,MAAK,SAAAmC,GAAO,OAAAA,EAAIpH,OAAS2F,EAAK3F,KAAlB,SACrB,OAAO,KAIX,OAAO4E,EACL,YAAMC,EAAK,CACTsB,eAAgB,CAAET,MAAK,GACvB2B,mBAAoB,CAAE3B,MAAK,MAzNf4B,CACZ/B,EACAC,IAIGA,EAGF,IAAM+B,EAAwBxJ,OAAOC,QAAO,SAGjD6G,GAEA,OAAO,YAAMA,EAAK,CAChB2C,aAAc,CACZ9B,MAAA,SAAMC,EAAMC,EAAMC,GAEhB,IACEA,GAC6C,wBAA5CA,EAAmCzB,KAFtC,CAQQ,IAAAK,EAAekB,EAAL,WAClB,GAAKlB,EAaL,IAPaA,EAAWQ,MAAK,SAAAN,GAC3B,OACE,YAAQA,KACkB,eAAzBA,EAAU3E,KAAK9B,OACgC,IAA9CyG,EAAU3E,KAAK9B,MAAMuJ,YAAY,KAAM,OAG7C,CAMA,IAAMC,EAAQ7B,EACd,KACE,YAAQ6B,IACRA,EAAM3C,YACN2C,EAAM3C,WAAWE,MAAK,SAAA0C,GAAK,iBAAAA,EAAE3H,KAAK9B,UAMpC,kCACKyH,GAAI,CACPlB,WAAY,OAAF,IAAE,CAAF,eAAMA,GAAU,IAAEN,IAAc,aAKjD,CACDyD,MAAA,SAAMF,GACJ,OAAOA,IAAUvD,KAIf0D,EAAyB,CAC7B7D,KAAM,SAACgB,GACL,IAAM8C,EAAsC,eAAzB9C,EAAUhF,KAAK9B,MAalC,OAZI4J,IAEC9C,EAAUrC,WACVqC,EAAUrC,UAAUsC,MAAK,SAAAc,GAAO,cAAAA,EAAI/F,KAAK9B,UAE1C,SAAU,IACR,6IAMC4J,IAIL,SAAUC,EAAsClD,GACpD,OAAOM,EACL,CAAC0C,GACD,YAAchD,IAmIlB,SAASoB,EACPzB,GAEA,IAAMwD,EAAqC,GAe3C,OAbAxD,EAAaC,WAAW1F,SAAQ,SAAA4F,IAE3B,YAAQA,IAAc,YAAiBA,KACxCA,EAAUH,aAEVyB,EAAsCtB,EAAUH,cAAczF,SAC5D,SAAAmH,GAAQ,OAAA8B,EAAahC,KAAb,MAEkB,mBAAnBrB,EAAUP,MACnB4D,EAAahC,KAAKrB,MAIfqD,EAMH,SAAUC,EACdC,GAKA,MAA4B,UAHT,YAAkBA,GAC6B9L,UAIzD8L,EAIW,YAAMA,EAAU,CAClCvB,oBAAqB,CACnBjB,MAAK,SAACC,GACJ,kCACKA,GAAI,CACPvJ,UAAW,cASf,SAAU+L,EACdD,GAEA,YAAcA,GAEd,IAAI1C,EAAcL,EAChB,CACE,CACEnB,KAAM,SAACgB,GAA6B,iBAAAA,EAAUhF,KAAK9B,OACnDkK,QAAQ,IAGZF,GAyBF,OAlBI1C,IACFA,EAAc,YAAMA,EAAa,CAC/B6B,mBAAoB,CAClB3B,MAAK,SAACC,GACJ,GAAIA,EAAKnB,cACgBmB,EAAKnB,aAAaC,WAAWC,OAClD,SAAAC,GACE,mBAAQA,IAAuC,eAAzBA,EAAU3E,KAAK9B,SAGvC,OAAO,UAQZsH,I,iCCngBT,oJAgBa6C,EAAY,IAhBzB,OAgB6B,GAEvBC,EAAe,IAAIC,QAKzB,SAASC,EAAaC,GACpB,IAAIC,EAAOJ,EAAa1J,IAAI6J,GAO5B,OANKC,GACHJ,EAAaxJ,IAAI2J,EAAOC,EAAO,CAC7BC,KAAM,IAAInJ,IACVoJ,IAAK,gBAGFF,EAGH,SAAUG,EAAYJ,GAC1BD,EAAaC,GAAOE,KAAK5J,SAAQ,SAAA+J,GAAM,OAAAA,EAAGD,YAAH,MAWnC,SAAUE,EAAYN,GAC1BD,EAAaC,GAAOE,KAAK5J,SAAQ,SAAA+J,GAAM,OAAAA,EAAGE,YAAH,MAGnC,SAAUC,EAAW/K,GACzB,IAAMgL,EAAS,IAAI1J,IACb2J,EAAY,IAAI3J,IAEhBsJ,EAAqB,SAArBA,EAA+BM,GACnC,GAAIzG,UAAUpB,OAAS,GACrB,GAAIrD,IAAUkL,EAAU,CACtBlL,EAAQkL,EACRF,EAAOnK,SAAQ,SAAA0J,GAIbD,EAAaC,GAAOG,IAAIS,MAAMP,GAG9BQ,EAAUb,MAGZ,IAAMc,EAAeC,MAAMrJ,KAAKgJ,GAChCA,EAAUM,QACVF,EAAaxK,SAAQ,SAAA2K,GAAY,OAAAA,EAAA,WAE9B,CAIL,IAAMjB,EAAQJ,EAAUsB,WACpBlB,IACFmB,EAAOnB,GACPD,EAAaC,GAAOG,IAAIE,IAI5B,OAAO5K,GAGT4K,EAAGe,aAAe,SAAAH,GAEhB,OADAP,EAAUlJ,IAAIyJ,GACP,WACLP,EAAUW,OAAOJ,KAIrB,IAAME,EAASd,EAAGE,YAAc,SAAAP,GAG9B,OAFAS,EAAOjJ,IAAIwI,GACXD,EAAaC,GAAOE,KAAK1I,IAAI6I,GACtBA,GAKT,OAFAA,EAAGD,YAAc,SAAAJ,GAAS,OAAAS,EAAOY,OAAPrB,IAEnBK,EAST,SAASQ,EAAUb,GACbA,EAAMsB,kBACRtB,EAAMsB,qB,gCC7GV,IAAYC,EAgDN,SAAUC,EACdC,GAEA,QAAOA,GAAgBA,EAAgB,EAnDzC,6EAAYF,GAMV,yBAMA,mCAMA,6BAMA,yBAOA,mBAKA,qBAKA,qBAzCF,CAAYA,MAAa,M,iCCHnB,SAAUG,EAAmBjM,GACjC,OAAOsL,MAAMY,QAAQlM,IAAUA,EAAMqD,OAAS,EADhD,mC,kCCAA,wBAuBE,iBAAM,WAAM,OAAA8I,eACZ,aAAM,WAAM,OAAAC,
WACZ,aAAM,WAAM,OAAAC,SACZ,aAAM,WAAM,OAAAC,MAAO,qE,2KCQf,SAAUC,EACdvC,EACAwC,GAEA,IAAIC,EAAqBD,EAKnBE,EAA2C,GAyDjD,OAxDA1C,EAAS2C,YAAY9L,SAAQ,SAAA+L,GAG3B,GAAwB,wBAApBA,EAAW1G,KACb,MAAM,QAAI,QACR,WAAW,OAAU,EAAU,wBAC7B,OAAU,EAAQ,gBAAW,OAAU,EAAW,WAAQ,KACxD,SACF,2FACF,YAIoB,uBAApB0G,EAAW1G,MACbwG,EAAU5E,KAAK8E,MAMe,qBAAvBH,IACT,QACE,YACA,IADU,EAAM,OAChB,SACE,OAAU,EAAM,6FAElB,6BACFA,EAAqBC,EAAU,GAAG5K,KAAK9B,OAK9B,2BACNgK,GAAQ,CACX2C,YAAa,OAAF,IAAE,CAAF,CACT,CACEzG,KAAM,sBAENhI,UAAW,QACXoI,aAAc,CACZJ,KAAM,eACNK,WAAY,CACV,CACEL,KAAM,iBACNpE,KAAM,CACJoE,KAAM,OACNlG,MAAOyM,QAMdzC,EAAS2C,aAAW,KAmBvB,SAAUE,EACdH,QAAA,IAAAA,MAAA,IAEA,IAAMI,EAAwB,GAI9B,OAHAJ,EAAU7L,SAAQ,SAAAkM,GAChBD,EAASC,EAASjL,KAAK9B,OAAS+M,KAE3BD,EAGH,SAAUE,EACdvG,EACAJ,GAEA,OAAQI,EAAUP,MAChB,IAAK,iBACH,OAAOO,EACT,IAAK,iBACH,IAAM+F,EAAe/F,EAAU3E,KAAK9B,MACpC,GAA2B,oBAAhBqG,EACT,OAAOA,EAAYmG,GAErB,IAAMO,EAAW1G,GAAeA,EAAYmG,GAE5C,OADA,QAAU,YAAU,uBAAqB,OAAAA,IAAgB,kBAClDO,GAAY,KAErB,QACE,OAAO,Q,iCCxIP,SAAUE,I,IACd,sDAEA,IAAMvO,EAASmB,OAAOmB,OAAO,MAY7B,OAVAkM,EAAQrM,SAAQ,SAAAU,GACTA,GACL1B,OAAOqB,KAAKK,GAAKV,SAAQ,SAAAM,GACvB,IAAMnB,EAASuB,EAAYJ,QACb,IAAVnB,IACFtB,EAAOyC,GAAOnB,SAKbtB,EAfT,mC,iCCNA,6CAcMyO,EAdN,MAcmB,EACfC,OAAOC,IAAI,sBACX,qBAEE,SAAUC,IACd,IAAI1N,EAAW,gBAA4BuN,GAU3C,OATKvN,IACHC,OAAO0N,eAAe,gBAAqBJ,EAAY,CACrDnN,MAAOJ,EAAU,gBAAwC,IACzD4N,YAAY,EACZC,UAAU,EACVC,cAAc,IAEhB9N,EAAQ+N,YAAc,iBAEjB/N,I,gCC7BH,SAAUgO,EAAgBrM,GAC9B,OAAe,OAARA,GAA+B,kBAARA,EADhC,mC,yKCMMsM,EAAK,KACLC,EAAa,CAACD,EAAIA,GAAIE,KAAK,OAqBlB,MAnBf,WACE,IACE,OAAOC,QAAQxM,SACf,SAYA,OAXA3B,OAAO0N,eAAejB,EAAA,EAAQwB,EAAY,CAIxC9N,MAA6C,eAAtC,OAAAiO,EAAA,IAAM,WAAM,sBACnBT,YAAY,EACZE,cAAc,EACdD,UAAU,IAIJnB,EAAA,EAAewB,IAIZ,G,gBChBC,EAAwB,cCLtC,QAAU,YAAS,mBAAsB,2C,+HCCrC,SAAUI,EAAcC,GAC5B,OAAOA,EAAIC,eAAe,iBAO5B,IA2BA,cAcE,WAAY,G,IACVxP,EAAa,gBACbyP,EAAY,eACZpP,EAAY,eACZqP,EAAY,eACZC,EAAS,YALX,EAaE,YAAMD,IAAa,K,OACnB,EAAK1P,cAAgBA,GAAiB,GACtC,EAAKyP,aAAeA,GAAgB,GACpC,EAAKpP,aAAeA,GAAgB,KACpC,EAAKuP,QAAUF,GA1DU,SAACH,GAC5B,IAAIK,EAAU,GAmBd,OAjBI,YAAgBL,EAAIvP,gBAAkB,YAAgBuP,EAAIE,iBAC3CF,EAAIvP,eAAiB,IACnC6P,OAAON,EAAIE,cAAgB,IACvBxN,SAAQ,SAAC9B,GACd,IAAMuP,EAAevP,EACjBA,EAAMyP,QACN,2BACJA,GAAW,UAAGF,EAAY,SAI1BH,EAAIlP,eACNuP,GAAW,UAAGL,EAAIlP,aAAauP,QAAO,OAIxCA,EAAUA,EAAQE,QAAQ,MAAO,IAuCAC,CAAqB,GACpD,EAAKJ,UAAYA,EAIhB,EAAaK,UAAYC,EAAY3O,U,EAE1C,OAtCiC,iBAsCjC,EAtCA,CAAiC4O,Q,gCC3CjC,kZA6BM,SAAUC,EAAcC,GAC5B,MAAO,CAAEC,MAAOC,OAAOF,IAGnB,SAAUG,EAAY5N,GAC1B,OAAOyM,QAAQzM,GAAsB,kBAARA,GAAyC,kBAAdA,EAAI0N,OAmBxD,SAAUG,EAAepP,GAC7B,OACE,YAAgBA,IACiB,aAAhCA,EAAuBkG,MACxBoF,MAAMY,QAASlM,EAAuB2M,aAwCpC,SAAU0C,EACdC,EACAxN,EACA9B,EACAkF,GAEA,GAlCF,SAAoBlF,GAClB,MAAsB,aAAfA,EAAMkG,KAiCTqJ,CAAWvP,IA9BjB,SAAsBA,GACpB,MAAsB,eAAfA,EAAMkG,KA6BYsJ,CAAaxP,GACpCsP,EAAOxN,EAAK9B,OAASyP,OAAOzP,EAAMA,YAC7B,GAxCT,SAAwBA,GACtB,MAAsB,iBAAfA,EAAMkG,KAuCFwJ,CAAe1P,IA5C5B,SAAuBA,GACrB,MAAsB,gBAAfA,EAAMkG,KA2CuByJ,CAAc3P,GAChDsP,EAAOxN,EAAK9B,OAASA,EAAMA,WACtB,GA1BT,SAAuBA,GACrB,MAAsB,gBAAfA,EAAMkG,KAyBF0J,CAAc5P,GAAQ,CAC/B,IAAM,EAAe,GACrBA,EAAM6P,OAAOC,KAAI,SAAAvO,GACf,OAAA8N,EAA4B,EAAc9N,EAAIO,KAAMP,EAAIvB,MAAOkF,MAEjEoK,EAAOxN,EAAK9B,OAAS,OAChB,GApCT,SAAoBA,GAClB,MAAsB,aAAfA,EAAMkG,KAmCF6J,CAAW/P,GAAQ,CAC5B,IAAMgQ,GAAiB9K,GAAc,IAAYlF,EAAM8B,KAAK9B,OAC5DsP,EAAOxN,EAAK9B,OAASgQ,OAChB,GA/BT,SAAqBhQ,GACnB,MAAsB,cAAfA,EAAMkG,KA8BF+J,CAAYjQ,GACrBsP,EAAOxN,EAAK9B,OAASA,EAAMkQ,OAAOJ,KAAI,SAAAK,GACpC,IAAMC,EAAoB,GAO1B,OANAf,EACEe,EACAtO,EACAqO,EACAjL,GAEMkL,EAA0BtO,EAAK9B,eAEpC,GAtCT,SAAqBA,GACnB,MAAsB,cAAfA,EAAMkG,KAqCFmK,CAAYrQ,GACrBsP,EAAOxN,EAAK9B,OAAUA,EAAwBA,UACzC,KApCT,SAAqBA,GACnB,MAAsB,c
AAfA,EAAMkG,KAmCFoK,CAAYtQ,GAGrB,MAAM,QAAI,QACR,wBAAwB,OAAU,uBAAe,OAAkB,OAAG,KAAtE,4FAGA,YANFsP,EAAOxN,EAAK9B,OAAS,MAUnB,SAAUuQ,EACd/G,EACAtE,GAEA,IAAIsL,EAAqB,KACrBhH,EAAM3C,aACR2J,EAAgB,GAChBhH,EAAM3C,WAAWhG,SAAQ,SAAAiG,GACvB0J,EAAc1J,EAAUhF,KAAK9B,OAAS,GAElC8G,EAAUrC,WACZqC,EAAUrC,UAAU5D,SAAQ,SAAC,G,IAAEiB,EAAI,OAAE9B,EAAK,QACxC,OAAAqP,EACEmB,EAAc1J,EAAUhF,KAAK9B,OAC7B8B,EACA9B,EACAkF,UAOV,IAAIoK,EAAc,KAQlB,OAPI9F,EAAM/E,WAAa+E,EAAM/E,UAAUpB,SACrCiM,EAAS,GACT9F,EAAM/E,UAAU5D,SAAQ,SAAC,G,IAAEiB,EAAI,OAAE9B,EAAK,QACpC,OAAAqP,EAA4BC,EAAQxN,EAAM9B,EAAOkF,OAI9CuL,EAAgBjH,EAAM1H,KAAK9B,MAAOsP,EAAQkB,GASnD,IAAME,EAA6B,CACjC,aACA,UACA,OACA,SACA,OACA,UAGWD,EAAkB5Q,OAAOC,QAAO,SAC3C6Q,EACAC,EACA/J,GAEA,GACE+J,GACA/J,GACAA,EAAU,YACVA,EAAU,WAAV,IACA,CACA,GACEA,EAAU,WAAV,QACCA,EAAU,WAAV,OAAgDxD,OAAS,EAC1D,CACA,IAAMwN,EAAahK,EAAU,WAAV,OACdA,EAAU,WAAV,OACD,GACJgK,EAAWC,OAEX,IAAM,EAAe,GAKrB,OAJAD,EAAWhQ,SAAQ,SAAAM,GACjB,EAAaA,GAAOyP,EAAKzP,MAGpB,UAAG0F,EAAU,WAAV,IAA+B,YAAIkK,EAC3C,GACD,KAED,OAAOlK,EAAU,WAAV,IAIX,IAAImK,EAA4BL,EAEhC,GAAIC,EAAM,CAIR,IAAMK,EAA0BF,EAAUH,GAC1CI,GAAqB,WAAIC,EAAe,KAc1C,OAXIpK,GACFhH,OAAOqB,KAAK2F,GAAYhG,SAAQ,SAAAM,IACS,IAAnCuP,EAAiBQ,QAAQ/P,KACzB0F,EAAW1F,IAAQtB,OAAOqB,KAAK2F,EAAW1F,IAAMkC,OAClD2N,GAAqB,WAAI7P,EAAG,YAAI4P,EAAUlK,EAAW1F,IAAK,KAE1D6P,GAAqB,WAAI7P,OAKxB6P,IACN,CACDG,aAAA,SAAaC,GACX,IAAMC,EAAWN,EAEjB,OADAA,EAAYK,EACLC,KAMPN,EAAY,SAA0B/Q,GACxC,OAAOsR,KAAKP,UAAU/Q,EAAOuR,IAG/B,SAASA,EAAkB7J,EAAc1H,GAOvC,OANI,YAAgBA,KAAWsL,MAAMY,QAAQlM,KAC3CA,EAAQH,OAAOqB,KAAKlB,GAAO8Q,OAAOU,QAAO,SAACC,EAAMtQ,GAE9C,OADAsQ,EAAKtQ,GAAOnB,EAAMmB,GACXsQ,IACN,KAEEzR,EAGH,SAAU0R,EACdlI,EACAtE,GAEA,GAAIsE,EAAM/E,WAAa+E,EAAM/E,UAAUpB,OAAQ,CAC7C,IAAM,EAAiB,GAIvB,OAHAmG,EAAM/E,UAAU5D,SAAQ,SAAC,G,IAAEiB,EAAI,OAAE9B,EAAK,QACpC,OAAAqP,EAA4B,EAAQvN,EAAM9B,EAAOkF,MAE5C,EAET,OAAO,KAGH,SAAUyM,EAAuBnI,GACrC,OAAOA,EAAMoI,MAAQpI,EAAMoI,MAAM5R,MAAQwJ,EAAM1H,KAAK9B,MAGhD,SAAU6R,EACdnT,EACA4H,EACAD,GAEA,GAAiC,kBAAtB3H,EAAOoT,WAChB,OAAOpT,EAAOoT,WAGhB,IAAwB,UAAAxL,EAAaC,WAAb,eAAyB,CAA5C,IAAME,EAAS,KAClB,GAAIsL,EAAQtL,IACV,GAA6B,eAAzBA,EAAU3E,KAAK9B,MACjB,OAAOtB,EAAOiT,EAAuBlL,QAElC,CACL,IAAMuL,EAAWH,EACfnT,EACA,YAAyB+H,EAAWJ,GAAcC,aAClDD,GAEF,GAAwB,kBAAb2L,EACT,OAAOA,IAMT,SAAUD,EAAQtL,GACtB,MAA0B,UAAnBA,EAAUP,KAGb,SAAU+L,EACdxL,GAEA,MAA0B,mBAAnBA,EAAUP,O,uEChSFgM,E,0DAmBjB,SAASC,EACPvT,GAEA,IAAMwT,EAAoCvS,OAAOmB,OAAO,MAIxD,OAHI,YAAgBpC,IAClBA,EAAciC,SAAQ,SAAA9B,GAAS,OAAAqT,EAAUrT,EAAMyP,SAAhB,KAE1B4D,EA1BQF,MAAkB,IA6BnC,IAAMG,EAAiB,CACrBC,QAAS,SAAC,G,IAAE1T,EAAa,gBAAEV,EAAS,YAC5BqU,EAAgBJ,EAAuBvT,GAG7C,GAAI2T,EAAcC,2BAChB,OAAO,EAGT,GAAID,EAAcE,uBAChB,OAAO,EAGD,IAAA5T,EAAaX,EAAUwU,aAAf,SAGhB,SACE7T,IACAA,EAAS8T,QACY,MAApB9T,EAAS8T,QAAsC,MAApB9T,EAAS8T,SAOzCC,wBAAwB,GAQlB,IAAAxE,EAAmBvO,OAAOK,UAAZ,eAEhB2S,EAAgB,IAAIxI,QAKtByI,EAAqB,EAEZC,EAA2B,SACtC3Q,GASA,QACE,YAAO,IAC+B,oBAArB,EAAM,QACW,oBAAzBA,EAAQ4Q,cAEjB,8IAGA,8CAGA,oBASE,EAAQ,cATJ,I,IAMJ,gBAA+B,GAAO,EAAQ,0DAChD,QAAO,UACP,kBAGE,cAAwB,EAAQ,yBAE9B,GAA0B,EAE1B,EAAiB,UAAC,IACtB,EAAI,SAAgB,GAAyC,mDAiB7D,gBACS,SACP,KAGM,QAAK,cAAe,8EAE5B,MAAW,EAAW,MACpB,WAAI,KAAqC,YACzC,IAAI,EAEA,EADA,KAEE,GACJ,EACE,WAAQ,KAKV,IAAI,EAAQ,EAAMnU,SAAQ,EAAa,EAAO,a,IAC5CoU,IAAc,GAAC,cAEf,GAAM,EAEN,IAAM,EAAc,GAChB,EAAgBpU,KAAiB,OACnC,gBACD,kB,IAKC,EAAa,GACbI,EAAaP,QACX,SAAgB,OAClB,gBACD,kB,IAGC,EAAQ,CACR,WACA,aAAS,EACT,YACA,uC,GAMF,GACyB,KACvB,EAAC,GACD,yBAEA,EAoBD,OApBmBqG,GAElB,EAAU,cACR,EAAM,Y,KACJ,CACA,gBACD,qBAKC,aAAQ,CACT,iBAGD,GACD,oCAGD,EAAO,mBAIX,KAEA,EAAM,C,KACJ,SAAQlG,GACT,gDAEC,eAAQI,GACT,qDAED,6BA6CA,OAzCA,EAAM,Y,KACJ,CACA,gBACD,uBAQD,GACA,IA1KR,SAAkCf,GAChC,OAAOA,EAAUyF,M
AAMgJ,YAAY5F,MACjC,SAAA0C,GAAK,MAAW,wBAAXA,EAAEvD,MAAF,aAAoCuD,EAAEvL,aA0KvC,CACG,KAAE,0BACD,qBAAuBgV,OAAa,WAElC,OADF,EAAO,EACL,CAIA,+DAIP,MAGC,EArIJ,SAAc,G,IAIZ,GAA2B,kBAApB,EACR,YAED,IAAI,EAAO,SAET,OAFWC,GACb,EAAO,IAAe,EAAKA,EAAQ,cAAe,OAC9C,EAAO,KAAc,EAAC,GACtBA,EAAOC,GACZ,UA2HO,CAAS,GAAC,MAAW,YACnB,aAAgB,gBAChB,QApQW,EAqQX,cAED,EAAe,EAAO,GAAc,UAAE,MAAC,uBAG3C,oBAGC,WAAkBrO,GAClB,yB,iCClSR,sCAAMsO,EAAe,IAAI7S,IAInB,SAAU8S,EAAaC,GAC3B,IAAMC,EAAQH,EAAa3S,IAAI6S,IAAW,EAE1C,OADAF,EAAazS,IAAI2S,EAAQC,EAAQ,GAC1B,UAAGD,EAAM,YAAIC,EAAK,YAAIC,KAAKC,SAASzT,SAAS,IAAIU,MAAM,M,wHCDnD,EAA0B,SAACgT,EAAQC,GAC9C,IAAIC,EACJ,IACEA,EAAavC,KAAKP,UAAU4C,GAC5B,MAAOzU,GACP,IAAM4U,EAAa,QAAI,QACrB,2BAA2B,OAAK,4BAA2B,OAAS,EAChD,sBAEtB,MADAA,EAAWA,WAAa5U,EAClB4U,EAER,OAAOD,G,QCJK,SAAUE,EACtBC,GAEA,IAAI,EAA+B,KAC/BjV,EAAsB,KACtBkV,GAAO,EACLhR,EAAkB,GAElBiR,EAOA,GAEN,SAASzO,EAAO0O,GACd,IAAIpV,EAAJ,CACA,GAAImV,EAAQ7Q,OAAQ,CAClB,IAAM+Q,EAAaF,EAAQG,QAC3B,GAAI/I,MAAMY,QAAQkI,IAAeA,EAAW,GAC1C,OAAOA,EAAW,GAAG,CAAEpU,MAAOmU,EAAOF,MAAM,IAG/ChR,EAAK6E,KAAKqM,IAEZ,SAAS/U,EAAQ+O,GACfpP,EAAQoP,EACI+F,EAAQvT,QAChBE,SAAQ,SAAUyT,GACpBA,EAAK,GAAGnG,OAET,GAAW,IAEd,SAASoG,IACPN,GAAO,EACKC,EAAQvT,QAChBE,SAAQ,SAAUyT,GACpBA,EAAK,GAAG,CAAEtU,WAAOwU,EAAWP,MAAM,QAEnC,GAAW,IAGd,EAAU,WACR,EAAU,KACVD,EAAOS,eAAe,OAAQhP,GAC9BuO,EAAOS,eAAe,QAASrV,GAC/B4U,EAAOS,eAAe,MAAOF,GAC7BP,EAAOS,eAAe,SAAUF,GAChCP,EAAOS,eAAe,QAASF,IAEjCP,EAAOU,GAAG,OAAQjP,GAClBuO,EAAOU,GAAG,QAAStV,GACnB4U,EAAOU,GAAG,MAAOH,GACjBP,EAAOU,GAAG,SAAUH,GACpBP,EAAOU,GAAG,QAASH,GAWnB,IAAMI,EAAkC,CACtClW,KAAA,WACE,OAVK,IAAImW,SAAQ,SAAUC,EAASC,GACpC,OAAI/V,EAAc+V,EAAO/V,GACrBkE,EAAKI,OAAewR,EAAQ,CAAE7U,MAAOiD,EAAKoR,QAAcJ,MAAM,IAC9DA,EAAaY,EAAQ,CAAE7U,WAAOwU,EAAWP,MAAM,SACnDC,EAAQpM,KAAK,CAAC+M,EAASC,SAgB3B,OANIC,EAAA,IACFJ,EAASvH,OAAO4H,eAAiB,WAC/B,OAAO1V,OAIJqV,EChFK,SAAUM,EACtBC,GAEA,IAAMP,EAA8B,CAClClW,KAAI,WACF,OAAOyW,EAAOC,SAUlB,OANIJ,EAAA,IACFJ,EAASvH,OAAO4H,eAAiB,WAC/B,OAAO1V,OAIJqV,ECPH,SAAUS,EACdvW,GAEA,IAAIwW,EAAgBxW,EAIpB,GAFmBA,ECpBcwW,ODoBHA,EAAOxW,EAASwW,MCb1C,SACJrV,GAEA,SACE+U,EAAA,IACC/U,EAAqCoN,OAAO4H,gBDU3CM,CAAwBD,GAAO,OEtBvB,SACZE,G,MAEMZ,EAAWY,EAAOnI,OAAO4H,iBAC/B,UACEvW,KAAA,WACE,OAAOkW,EAASlW,UAEjB2O,OAAO4H,eAAR,WACE,OAAO1V,MAET,EFWwC0V,CAAiBK,GAE3D,GCrBI,SAA2BrV,GAC/B,QAAUA,EAA8BwV,UDoBpCC,CAAiBJ,GAAO,OAAOJ,EAAkBI,EAAKG,aAI1D,GCZI,SAA2BxV,GAC/B,QAAUA,EAAegU,ODWrB0B,CAAiBL,GACnB,OAAOJ,EACJI,EAAKrB,SAA0CwB,aAIpD,GCdI,SAAiBxV,GACrB,QAAUA,EAAe2V,YDarBC,CAAOP,GAAO,OG3BN,SACZ1Q,GAEA,IAAIkR,GAAW,EAETlB,EAA+B,CACnClW,KAAA,WACE,OAAIoX,EACKjB,QAAQC,QAAQ,CACrB7U,WAAOwU,EACPP,MAAM,KAEV4B,GAAW,EACJ,IAAIjB,SAAQ,SAAUC,EAASC,GACpCnQ,EACGvB,MAAK,SAAUpD,GACd6U,EAAQ,CAAE7U,MAAOA,EAAuBiU,MAAM,OAE/C1Q,MAAMuR,SAWf,OANIC,EAAA,IACFJ,EAASvH,OAAO4H,eAAiB,WAC/B,OAAO1V,OAIJqV,EHFkBmB,CAAmBT,EAAKM,eAEjD,GCZI,SAA+B3V,GACnC,QAAUA,EAA6B+V,KDWnCC,CAAqBX,GAAO,OAAOtB,EAAsBsB,GAE7D,MAAM,IAAIvG,MACR,8EItCG,IAAMmH,EAAmB,SAC9BpX,EACAH,EACA8P,GAEA,IAAMzP,EAAQ,IAAI+P,MAAMN,GAKxB,MAJAzP,EAAM+C,KAAO,cACb/C,EAAMF,SAAWA,EACjBE,EAAMmX,WAAarX,EAAS8T,OAC5B5T,EAAML,OAASA,EACTK,GCXA,EAAmBc,OAAOK,UAAZ,eAqFhB,SAAUiW,EAAaC,GAC3B,IAAMC,EAAsC,GAU5C,OATAD,EAAWE,MAAM,MAAMzV,SAAQ,SAAC0V,GAC9B,IAAMxV,EAAIwV,EAAKrF,QAAQ,KACvB,GAAInQ,GAAK,EAAG,CAEV,IAAM,EAAOwV,EAAK5V,MAAM,EAAGI,GAAGyV,OAAOC,cAC/BzW,EAAQuW,EAAK5V,MAAMI,EAAI,GAAGyV,OAChCH,EAAY,GAAQrW,MAGjBqW,EAGH,SAAUK,EAAiB7X,EAAoB8X,GACnD,GAAI9X,EAAS8T,QAAU,IAAK,CAS1BsD,EACEpX,EARgB,WAChB,IACE,OAAOyS,KAAKsF,MAAMD,GAClB,MAAOxI,GACP,OAAOwI,GAKTE,GACA,wDAAiDhY,EAAS8T,SAI9D,IACE,OAAOrB,KAAKsF,MAAMD,GAClB,MAAOxI,GACP,IAAM2F,EAAa3F,EAKnB,MAJA2F,EAAWhS,KAAO,mBAClBgS,EAAWjV,SAAWA
,EACtBiV,EAAWoC,WAAarX,EAAS8T,OACjCmB,EAAW6C,SAAWA,EAChB7C,GAIJ,SAAUgD,EAAY3I,EAAU/P,G,QACnB,eAAb+P,EAAIrM,OAMJqM,EAAIzP,QAAUyP,EAAIzP,OAAOC,QAAUwP,EAAIzP,OAAOuE,OA4BnC,QAAb,EAAA7E,EAASK,YAAI,gBAAG0P,EAAIzP,SAGR,QAAd,EAAAN,EAASW,aAAK,gBAAGoP,IAGb,SAAU4I,EACdlY,EACAX,EACAE,GAUI,IAAoC4Y,KARd9Y,EASnB,SAACW,GACN,OAAAA,EACGoY,OACA7T,MAAK,SAACuT,GAAa,OAAAD,EAAc7X,EAAd,MACnBuE,MAAK,SAAC1E,GAyBL,OAxBIG,EAAS8T,QAAU,KAErBsD,EACEpX,EACAH,EACA,wDAAiDG,EAAS8T,SAI3DrH,MAAMY,QAAQxN,IACd,EAAe6B,KAAK7B,EAAQ,SAC5B,EAAe6B,KAAK7B,EAAQ,WAG7BuX,EACEpX,EACAH,EACA,iDACE4M,MAAMY,QAAQ8K,GACVA,EAAWlH,KAAI,SAAC1J,GAAO,OAAAA,EAAA,iBACvB4Q,EAAWE,cAAa,OAI3BxY,OAtCwBG,GAClCuE,MAAK,SAAC1E,G,QACQ,QAAb,EAAAN,EAASK,YAAI,gBAAGC,GACC,QAAjB,EAAAN,EAASY,gBAAQ,oBAElBuE,OAAM,SAAC4K,GAAQ,OAAA2I,EAAY3I,EAAZ,MCtLb,I,SC4HMgJ,EAAqB,CAChCC,KA7B2C,CAC3CC,cAAc,EACdC,mBAAmB,EACnBC,oBAAoB,GA2BpBC,QAxBqB,CAErBC,OAAQ,MAaR,eAAgB,oBAUhBrV,QAPqB,CACrBkC,OAAQ,SASGoT,EAA0B,SAACC,EAAKC,GAAY,OAAAA,EAAA,IAenD,SAAUC,EACd3Z,EACA0Z,G,IACA,wDAEA,IAAIxV,EAAU,GACVgV,EAAO,GAEXU,EAAQjX,SAAQ,SAAAuH,GACdhG,EAAU,OAAH,IAAG,CAAH,2BACFA,GACAgG,EAAOhG,SAAO,CACjBoV,QAAS,OAAF,IAAE,CAAF,eACFpV,EAAQoV,SACRpP,EAAOoP,WAIVpP,EAAO2P,cACT3V,EAAQ2V,YAAc3P,EAAO2P,aAG/BX,EAAO,OAAH,IAAG,CAAH,eACCA,GACAhP,EAAOgP,SAIdhV,EAAQoV,QAAUQ,EAAuB5V,EAAQoV,QAASJ,EAAKG,oBAGvD,IAAAL,EAAgDhZ,EAAnC,cAAE+Z,EAAiC/Z,EAAvB,WAAEgH,EAAqBhH,EAAZ,UAAEyF,EAAUzF,EAAL,MAC7CmX,EAAa,CAAE6B,cAAa,EAAEhS,UAAS,GAO7C,OALIkS,EAAKE,oBAAoBjC,EAAa4C,WAAaA,GAGnDb,EAAKC,eAAehC,EAAa1R,MAAQiU,EAAQjU,EAAO,MAErD,CACLvB,QAAO,EACPiT,KAAI,GAOR,SAAS2C,EACPR,EACAD,GAIA,IAAKA,EAAoB,CACvB,IAAM,EAAoB1X,OAAOmB,OAAO,MAIxC,OAHAnB,OAAOqB,KAAKrB,OAAO2X,IAAU3W,SAAQ,SAAAiB,GACnC,EAAkBA,EAAK2U,eAAiBe,EAAQ1V,MAE3C,EAOT,IAAMoW,EAAarY,OAAOmB,OAAO,MACjCnB,OAAOqB,KAAKrB,OAAO2X,IAAU3W,SAAQ,SAAAiB,GACnCoW,EAAWpW,EAAK2U,eAAiB,CAAE0B,aAAcrW,EAAM9B,MAAOwX,EAAQ1V,OAGxE,IAAMsW,EAAoBvY,OAAOmB,OAAO,MAIxC,OAHAnB,OAAOqB,KAAKgX,GAAYrX,SAAQ,SAAAiB,GAC9BsW,EAAkBF,EAAWpW,GAAMqW,cAAgBD,EAAWpW,GAAM9B,SAE/DoY,EC5NH,SAAUC,EAAaC,GAC3B,OAAO,IAAI,KAAc,SAAAla,GACvBA,EAASW,MAAMuZ,MCqBnB,IAAMC,EAAc,aAAM,WAAM,OAAAC,SAEnB,EAAiB,SAACC,QAAA,IAAAA,MAAA,IAE3B,MASEA,EATc,IAAhBC,OAAG,IAAG,aAAU,EAETC,EAOLF,EAPmB,MACrB,EAMEA,EANoB,MAAtBG,OAAK,IAAG,EAAAlB,EAAc,EACtBJ,EAKEmB,EALe,kBACjBlB,EAIEkB,EAJgB,mBAClBI,EAGEJ,EAHc,iBAChB,EAEEA,EAF4B,uBAA9BK,OAAsB,IAAG,GAAK,EAC3BC,EAAc,YACfN,EAVA,8GAYAjX,SHtCsB,SAACwX,GAC3B,IAAKA,GAA4B,qBAAVR,MACrB,MAAM,QAAI,QAAe,sbAWtB,YG4BH,CAAaG,GAAkBJ,GAGjC,IAAMU,EAAa,CACjB7B,KAAM,CAAEE,kBAAiB,EAAEC,mBAAkB,GAC7CnV,QAAS2W,EAAe7F,aACxB6E,YAAagB,EAAehB,YAC5BP,QAASuB,EAAevB,SAG1B,OAAO,IAAI0B,EAAA,GAAW,SAAAhb,GACpB,IAAIib,ECpDiB,SACvBjb,EACAkb,GAKA,OAHgBlb,EAAUwU,aACCgG,MAIO,oBAAhBU,EACTA,EAAYlb,GAEXkb,GAA0B,YDwClBC,CAAUnb,EAAWwa,GAE/B9Y,EAAU1B,EAAUwU,aAQpB4G,EAGF,GAEJ,GAAI1Z,EAAQ2Z,gBAAiB,CACrB,MAAoB3Z,EAAQ2Z,gBAA1B,EAAI,OAAEC,EAAO,UACjB,IACFF,EAAuB,6BAA+B,GAEpDE,IACFF,EAAuB,gCAAkCE,GAI7D,IAyCIC,EAzCEC,EAAiB,OAAH,IAAG,CAAH,eAAQJ,GAA2B1Z,EAAQ4X,SAEzDmC,EAAgB,CACpBvC,KAAMxX,EAAQwX,KACdhV,QAASxC,EAAQsT,aACjB6E,YAAanY,EAAQmY,YACrBP,QAASkC,GAIL,EAAoB7B,EACxB3Z,EACA0a,EACAzB,EACA8B,EACAU,GALMvX,EAAO,UAAEiT,EAAI,OAQrB,GAAIA,EAAKnQ,YAAc4T,EAAwB,CAC7C,IAAM,EAAc,IAAIxX,IAAIzB,OAAOqB,KAAKmU,EAAKnQ,YAC7C,YAAMhH,EAAUyF,MAAO,CACrB4D,SAAA,SAASE,EAAMC,EAAMC,GAKfA,GAAsD,uBAA3CA,EAAkCzB,MAC/C,EAAY0F,OAAOnE,EAAK3F,KAAK9B,UAI/B,EAAY4Z,OAGdvE,EAAKnQ,UAAS,eAAQmQ,EAAKnQ,WAC3B,EAAYrE,SAAQ,SAAAiB,UACXuT,EAAKnQ,UAAWpD,OAM7B,IAAMM,EAAgByX,OAAQ,CACtB,ME1H2B,WACrC,GAA+B,qBAApBC,gBACT,MAAO,CAAEL,YAAY,EAAOI,QAAQ,GAEtC,IAAMJ,EAAa,IAAIK,gBAEvB,MAAO,CAAEL,WAAU,EAAEI,OADNJ,EAAWI,QFqHsBE,GAA
xBC,EAAW,aAAEH,EAAM,UACvCJ,EAAaO,KACI5X,EAAgByX,OAASA,GAmB5C,GAXEhB,IACC3a,EAAUyF,MAAMgJ,YAAY5F,MALF,SAAC0C,GAC5B,MAAkB,wBAAXA,EAAEvD,MAAkD,aAAhBuD,EAAEvL,eAM7CkE,EAAQkC,OAAS,OAIf,YAAc,CAAC,SAAUpG,EAAUyF,SACrCvB,EAAQoV,QAAQC,OAAS,yDAGJ,QAAnBrV,EAAQkC,OAAkB,CACtB,MG3IN,SAA2B6U,EAAmB9D,GAGlD,IAAM4E,EAAwB,GACxBC,EAAgB,SAAC/Y,EAAanB,GAClCia,EAAYnS,KAAK,UAAG3G,EAAG,YAAIgZ,mBAAmBna,MAShD,GANI,UAAWqV,GACb6E,EAAc,QAAS7E,EAAK1R,OAE1B0R,EAAK6B,eACPgD,EAAc,gBAAiB7E,EAAK6B,eAElC7B,EAAKnQ,UAAW,CAClB,IAAIkV,OAAmB,EACvB,IACEA,EAAsB,EACpB/E,EAAKnQ,UACL,iBAEF,MAAO4O,GACP,MAAO,CAAEA,WAAU,GAErBoG,EAAc,YAAaE,GAE7B,GAAI/E,EAAK4C,WAAY,CACnB,IAAIoC,OAAoB,EACxB,IACEA,EAAuB,EACrBhF,EAAK4C,WACL,kBAEF,MAAOnE,GACP,MAAO,CAAEA,WAAU,GAErBoG,EAAc,aAAcG,GAS9B,IAAItN,EAAW,GACbuN,EAAcnB,EACVoB,EAAgBpB,EAAUjI,QAAQ,MACjB,IAAnBqJ,IACFxN,EAAWoM,EAAUqB,OAAOD,GAC5BD,EAAcnB,EAAUqB,OAAO,EAAGD,IAEpC,IAAME,GAAkD,IAA9BH,EAAYpJ,QAAQ,KAAc,IAAM,IAGlE,MAAO,CAAEwJ,OADPJ,EAAcG,EAAoBR,EAAYlM,KAAK,KAAOhB,GHqFzB4N,CAAiBxB,EAAW9D,GAAnDqF,EAAM,SAAE5G,EAAU,aAC1B,GAAIA,EACF,OAAOuE,EAAUvE,GAEnBqF,EAAYuB,OAEZ,IACGtY,EAAgBiT,KAAO,EAAwBA,EAAM,WACtD,MAAOvB,GACP,OAAOuE,EAAUvE,GAIrB,OAAO,IAAI,KAAW,SAAA1V,GAqBpB,OAfqBua,GAAkB,aAAM,WAAM,OAAAH,UAAUD,GAE/CY,EAAW/W,GACtBgB,MAAK,SAAAvE,G,MACJX,EAAU0c,WAAW,CAAE/b,SAAQ,IAC/B,IAAMgc,EAAwB,QAAhB,EAAAhc,EAAS2Y,eAAO,eAAE9W,IAAI,gBAEpC,OAAc,OAAVma,GAAkB,qBAAqB/U,KAAK+U,GJ7JpD,SACJhc,EACAT,G,oKAEA,QAAoBoW,IAAhBsG,YACF,MAAM,IAAIhM,MACR,6EAGEiM,EAAU,IAAID,YAAY,SAC1BE,EAA8B,QAAhB,EAAAnc,EAAS2Y,eAAO,eAAE9W,IAAI,gBACpCua,EAAY,YAMZC,GAAyB,OAAXF,QAAW,IAAXA,OAAW,EAAXA,EAAaG,SAASF,IAC3B,OAAXD,QAAW,IAAXA,OAAW,EAAXA,EACII,WAAqB,OAAXJ,QAAW,IAAXA,OAAW,EAAXA,EAAa9J,QAAQ+J,IAAaA,EAAU5X,QACvDqL,QAAQ,QAAS,IACjBA,QAAQ,WAAY,IACpB8H,OACH,IAEA6E,EAAW,YAAKH,GAChBI,EAAS,GACP3G,EAAWS,EAAiBvW,GAC9B0c,GAAU,E,wBAEPA,EACmB,GAAM5G,EAASlW,QAD3B,M,OAOZ,IANM,EAAkB,SAAhBuB,EAAK,QAAEiU,EAAI,OACbE,EAAyB,kBAAVnU,EAAqBA,EAAQ+a,EAAQS,OAAOxb,GACjEub,GAAWtH,EAEPwH,GADJH,GAAUnH,GACMjD,QAAQmK,GAEjBI,GAAM,GAAG,CAMd,GALIjN,OAAO,EACX,EAAoB,CAClB8M,EAAO3a,MAAM,EAAG8a,GAChBH,EAAO3a,MAAM8a,EAAKJ,EAAShY,SAFnBiY,EAAM,MAAf9M,EAAO,MAIIgI,OAAQ,CAIlB,GAHMzV,EAAIyN,EAAQ0C,QAAQ,YACpBsG,EAAUrB,EAAa3H,EAAQ7N,MAAM,EAAGI,KACxC,EAAcyW,EAAQ,mBAGiC,IAA3D,EAAYf,cAAcvF,QAAQ,oBAElC,MAAM,IAAIpC,MAAM,iEAEZuG,EAAO7G,EAAQ7N,MAAMI,GAE3B,IACQrC,EAASgY,EAAiB7X,EAAUwW,EAAK3G,QAAQ,OAAQ,MAE7D7O,OAAOqB,KAAKxC,GAAQ2E,OAAS,GAC7B,SAAU3E,GACV,gBAAiBA,GACjB,WAAYA,KAIC,QAAb,EAAAN,EAASK,YAAI,gBAAGC,IAElB,MAAOyP,GACP2I,EAAY3I,EAAK/P,IAGrBqd,EAAKH,EAAOpK,QAAQmK,G,0BAGP,QAAjB,EAAAjd,EAASY,gBAAQ,iB,WIoFA0c,CAAkB7c,EAAUT,GAE5B2Y,EAAalY,EAAUX,EAAWE,MAG5CmF,OAAM,SAAA4K,GAAO,OAAA2I,EAAY3I,EAAZ,MAET,WAGDsL,GAAYA,EAAWkC,gBIjLnC,cAEE,WAAmBvZ,QAAA,IAAAA,MAAA,IAAnB,MACE,YAAM,EAAeA,GAAS/C,UAAQ,K,OADrB,EAAA+C,U,EAGrB,OAL8B,iBAK9B,EALA,CAA8B8W,EAAA,I,6FCFjB,EAAUA,EAAA,EAAWrW,Q,8CCE5B,SAAU+Y,EACdzX,EACA0X,EACAC,GAEA,OAAO,IAAI,KAAc,SAAA1d,GACf,IAAAK,EAA0BL,EAAtB,KAAEW,EAAoBX,EAAf,MAAE,EAAaA,EAAL,SACzB2d,EAAsB,EACtBC,GAAY,EACZC,EAAe,CAIjB7Y,KAAA,SAAK8Y,GACH,OAAO,IAAItH,SAAQ,SAAAC,GAAW,OAAAA,EAAQqH,UAI1C,SAASC,EACPC,EACAC,GAEA,OAAID,EACK,SAAAvU,KACHkU,EACF,IAAMO,EAAO,WAAM,OAAAF,EAAA,IACnBH,EAAeA,EAAa7Y,KAAKkZ,EAAMA,GAAMlZ,MAC3C,SAAA1E,KACIqd,EACFtd,GAAQA,EAAK8B,KAAKnC,EAAUM,GACxBsd,GACFO,EAAQvd,cAGZ,SAAAD,GAEE,OADEgd,EACIhd,KAERwE,OAAM,SAAAiZ,GACNzd,GAASA,EAAMwB,KAAKnC,EAAUoe,OAI3B,SAAA3U,GAAO,OAAAwU,GAAYA,EAAS9b,KAAKnC,EAA1B,IAIlB,IAAMme,EAAuB,CAC3B9d,KAAM0d,EAAaN,EAAOpd,GAC1BM,MAAOod,EAAaL,EAAS/c,GAC7BC,SAAQ,WACNgd,GAAY,EACPD,GACH,GAAY,EAASxb,KAAKnC,KAK1BC,EAAM8F,EAAW3F,UAAU+d,GACjC,OAAO,WAAM,OAAAle,EAAA,kBC7DX,SAAUoe,
EAAsB/d,GACpC,OAAQA,EAAOC,QAAUD,EAAOC,OAAO0E,OAAS,IAAM,E,wDCDlD,SAAUqZ,EACdC,EACArY,EACAgE,GAKA,IAAMsU,EAAqC,GAC3CD,EAAU9b,SAAQ,SAAAgc,GAAO,OAAAA,EAAIvY,IAAWsY,EAAoB9U,KAAnC+U,MACzBD,EAAoB/b,SAAQ,SAAAgc,GAAO,OAACA,EAAYvY,GAAb,MCD/B,SAAUwY,EAEdC,GACA,SAASnc,EAAIO,GAIXtB,OAAO0N,eAAewP,EAAU5b,EAAK,CAAEnB,MAAO,MAShD,OAPI+U,EAAA,GAAgB3H,OAAO4P,SACzBpc,EAAIwM,OAAO4P,SAKbpc,EAAI,aACGmc,ECrBT,SAASE,EAAiBjd,GACxB,OAAOA,GAAwC,oBAAvBA,EAAcoD,KAsCxC,kBAcE,WAAY8Z,GAAZ,MACE,aAAM,SAAA9e,GAEJ,OADA,EAAK+e,YAAY/e,GACV,WAAM,SAAKgf,eAAL,QACb,K,OAdI,EAAAT,UAAY,IAAIrb,IAsGR,EAAAqD,QAAU,IAAIiQ,SAAW,SAACC,EAASC,GACjD,EAAKD,QAAUA,EACf,EAAKC,OAASA,KASR,EAAAuI,SAAW,CACjB5e,KAAM,SAACC,GACY,OAAb,EAAKL,MACP,EAAKif,OAAS,CAAC,OAAQ5e,GACvB,EAAK6e,OAAO,OAAQ7e,GACpBge,EAAuB,EAAKC,UAAW,OAAQje,KAInDK,MAAO,SAAC,GACE,IAAAV,EAAQ,EAAL,IACC,OAARA,IAIEA,GAAKmf,YAAW,WAAM,OAAAnf,EAAA,iBAC1B,EAAKA,IAAM,KACX,EAAKif,OAAS,CAAC,QAAS,GACxB,EAAKxI,OAAO,GACZ,EAAKyI,OAAO,QAAS,GACrBb,EAAuB,EAAKC,UAAW,QAAS,KAIpD3d,SAAU,WACA,IAAAX,EAAQ,EAAL,IACX,GAAY,OAARA,EAAc,CAChB,IAAM2B,EAAQ,EAAKkd,QAAQ7I,QACtBrU,EAiBMid,EAAcjd,GACvBA,EAAMoD,MAAK,SAAAyZ,GAAO,SAAKxe,IAAMwe,EAAIre,UAAU,EAAzB,aAElB,EAAKH,IAAM2B,EAAMxB,UAAU,EAAK6e,WAnB5Bhf,GAAKmf,YAAW,WAAM,OAAAnf,EAAA,iBAC1B,EAAKA,IAAM,KACP,EAAKif,QACc,SAAnB,EAAKA,OAAO,GACd,EAAKzI,QAAQ,EAAKyI,OAAO,IAEzB,EAAKzI,UAEP,EAAK0I,OAAO,YAOZb,EAAuB,EAAKC,UAAW,gBAUvC,EAAAc,oBAAsB,IAAInc,IAgC3B,EAAAoc,OAAS,SAACC,GACf,EAAK7I,OAAO6I,GACZ,EAAKT,QAAU,GACf,EAAKG,SAASre,YAvLd,EAAK2F,QAAQpB,OAAM,SAAAqa,OAKI,oBAAZV,IACTA,EAAU,CAAC,IAAI,IAAWA,KAGxBD,EAAcC,GAChBA,EAAQ9Z,MACN,SAAAya,GAAY,SAAKC,MAAL,KACZ,EAAKT,SAASte,OAGhB,EAAK+e,MAAMZ,G,EA0KjB,OAhNgC,iBA8CtB,YAAAY,MAAR,SAAcZ,QACK,IAAb5d,KAAKjB,MAKTiB,KAAK4d,QAAU5R,MAAMrJ,KAAKib,GAM1B5d,KAAK+d,SAASre,aAGR,YAAA+e,mBAAR,SAA2B3f,GACzB,GAAIkB,KAAKge,OAAQ,CACf,IAAMU,EAAc1e,KAAKge,OAAO,GAC1BhZ,EAASlG,EAAS4f,GACpB1Z,GACFA,EAAO/D,KAAKnC,EAAUkB,KAAKge,OAAO,IAKnB,OAAbhe,KAAKjB,KACW,SAAhB2f,GACA5f,EAASY,UACXZ,EAASY,aAKR,YAAAme,YAAP,SAAmB/e,GACZkB,KAAKqd,UAAUlc,IAAIrC,KAGtBkB,KAAKye,mBAAmB3f,GACxBkB,KAAKqd,UAAU5a,IAAI3D,KAIhB,YAAAgf,eAAP,SAAsBhf,GAElBkB,KAAKqd,UAAU/Q,OAAOxN,IACtBkB,KAAKqd,UAAU/C,KAAO,GAMtBta,KAAK+d,SAASre,YA4EV,YAAAue,OAAR,SACEjZ,EACAuD,GAEQ,IAAA4V,EAAwBne,KAAL,oBACvBme,EAAoB7D,OAGtBta,KAAKme,oBAAsB,IAAInc,IAC/Bmc,EAAoB5c,SAAQ,SAAA2K,GAAY,OAAAA,EAASlH,EAAT,QAU5C,YAAA2Z,WAAA,SAAW/B,GACT,IAAI5Z,GAAS,EACbhD,KAAKme,oBAAoB1b,KAAI,SAACuC,EAAQuD,GAC/BvF,IACHA,GAAS,EACT4Z,EAAS5X,EAAQuD,QAWzB,EAhNA,CAAgC,KAyNhCiV,EAAsB,G,gDClOpB,EAEEjd,OAFI,OACN,EACEA,OADY,eA0BhB,cAuCE,WAAY,G,IACVqe,EAAY,eACZC,EAAS,YACT/b,EAAO,UAHT,EASE,aAAM,SAAChE,GAGL,IACE,IAAIggB,EAAehgB,EAAiBigB,cAAcC,UAC9CF,IAAgBA,EAAYrf,QAC9Bqf,EAAYrf,MAAQwf,GAEtB,UAEF,IAAMC,GAAS,EAAK7B,UAAU/C,KAC9B,EAAK+C,UAAU5a,IAAI3D,GAGnB,IAAMqgB,EAAO,EAAKA,KAiBlB,OAhBIA,GAAQA,EAAK1f,MACfX,EAASW,OAASX,EAASW,MAAM0f,EAAK1f,OAC7B0f,GAAQA,EAAK/f,QACtBN,EAASK,MAAQL,EAASK,KAAKggB,EAAK/f,QAKlC8f,GAKF,EAAKE,YAAYnb,OAAM,eAGlB,WACD,EAAKoZ,UAAU/Q,OAAOxN,KAAc,EAAKue,UAAU/C,MACrD,EAAK+E,qBAGT,KA9DI,EAAAhC,UAAY,IAAIrb,IAChB,EAAAsd,cAAgB,IAAItd,IAgE1B,EAAK6c,UAAYA,EACjB,EAAKD,aAAeA,EAGpB,EAAKW,YAAa,EAGhB,MAGEX,EAAa7L,eADT,WADJ,QADF,MAEI,GAAE,GADJ3N,YAAaoa,OAAkB,IAAG,gBAAa,EAKjD,EAKE1c,EAL8B,YAAhCsC,OAAW,IAAG,EAAAoa,EAAkB,EAChC,EAIE1c,EADD,mBAHD8B,OAAkB,IAAG,EAEH,YAAhBQ,EAA4Boa,EAAqBpa,EAClD,EAGH,EAAKtC,QAAO,2BACPA,GAAO,CAKV8B,mBAAkB,EAIlBQ,YAAW,IAGb,EAAKqa,QAAUZ,EAAUY,SAAWb,EAAac,kBAEjD,IAAMC,EAAQ,YAAuB,EAAKtb,O,OAC1C,EAAKub,UAAYD,GAASA,EAAMnd,MAAQmd,EAAMnd,KAAK9B,M,EAuuBvD,OA/1BU,iBAKR,sBAAW,oBAAK,C,IAAhB,WAGE,OAAOV,KAAK4e,aAAaiB,UAAU7f,KAAK8C,QAAQuB,OAAOqG,U,g
CAKzD,sBAAW,wBAAS,C,IAApB,WACE,OAAO1K,KAAK8C,QAAQ8C,W,gCA6Gf,YAAAxG,OAAP,sBACE,OAAO,IAAIkW,SAAQ,SAACC,EAASC,GAI3B,IAAM1W,EAA+C,CACnDK,KAAM,SAACC,GACLmW,EAAQnW,GAYR,EAAKie,UAAU/Q,OAAOxN,GACjB,EAAKue,UAAU/C,MAClB,EAAKsE,aAAakB,YAAY,EAAKL,SAGrCvB,YAAW,WACTzY,EAAa5F,gBACZ,IAELJ,MAAO+V,GAEH/P,EAAe,EAAKvG,UAAUJ,OAIjC,YAAAihB,iBAAP,SAAwBC,QAAA,IAAAA,OAAA,GAEtB,IAAMC,EAAajgB,KAAKkgB,eAAc,GAEhCxT,EACJ1M,KAAK6e,UAAUnS,eACduT,GAAcA,EAAWvT,eAC1B,IAAcyT,MAEV/gB,EAAS,2BACV6gB,GAAU,CACbhd,QAAS,YAAyByJ,GAClCA,cAAa,IAGP,EAAgC1M,KAAK8C,QAAV,YAA3BsC,OAAW,IAAG,gBAAa,EACnC,GAGkB,iBAAhBA,GACgB,aAAhBA,GACgB,YAAhBA,GAKApF,KAAK4e,aAAaiB,UAAU7f,KAAK8C,QAAQuB,OAAO+b,wBAG3C,CACL,IAAMC,EAAOrgB,KAAK6e,UAAUyB,WAExBD,EAAK3gB,UAAYM,KAAK8C,QAAQyd,qBAChCnhB,EAAOuE,KAAO0c,EAAKjhB,QAGjB,YAAMA,EAAOuE,KAAM,MACrBvE,EAAOuE,UAAO,GAGZ0c,EAAK3gB,iBAGAN,EAAOohB,SAMZH,EAAK3gB,UACLN,EAAOsN,gBAAkB,IAAczJ,SACtB,gBAAhBmC,GACe,eAAhBA,IAEAhG,EAAOsN,cAAgB,IAAcyT,MACrC/gB,EAAO6D,SAAU,IAGnB7D,EAAOohB,SAAU,GAIjBte,SACCme,EAAK3gB,UACLM,KAAK8C,QAAQ2d,gBACbrhB,EAAO6D,SACP7D,EAAOuE,MACPvE,EAAOK,OAERihB,EAAsBL,EAAKM,SAQ/B,OAJIX,GACFhgB,KAAK4gB,iBAAiBxhB,GAGjBA,GAKF,YAAAyhB,0BAAP,SACEC,EACAlb,GAEA,OACG5F,KAAKmf,OACL,YAAMnf,KAAKmf,KAAK/f,OAAQ0hB,IACxBlb,IAAc,YAAM5F,KAAKmf,KAAKvZ,UAAWA,IAItC,YAAAmb,QAAR,SACElf,EACAmf,GAEA,IAAM7B,EAAOnf,KAAKmf,KAClB,GACEA,GACAA,EAAKtd,MACHmf,GAAsB,YAAM7B,EAAKvZ,UAAW5F,KAAK4F,YAEnD,OAAOuZ,EAAKtd,IAIT,YAAAqe,cAAP,SAAqBc,GACnB,OAAOhhB,KAAK+gB,QAAQ,SAAUC,IAGzB,YAAAC,aAAP,SAAoBD,GAClB,OAAOhhB,KAAK+gB,QAAQ,QAASC,IAGxB,YAAAE,iBAAP,kBACSlhB,KAAKmf,KACZnf,KAAKuf,YAAa,GAGb,YAAA4B,sBAAP,WACEnhB,KAAK4e,aAAawC,YAAYphB,KAAKyf,UAU9B,YAAA4B,QAAP,SAAezb,G,MACP0b,EAAkE,CAEtEC,aAAc,GAMRnc,EAAgBpF,KAAK8C,QAAV,YASnB,GAPEwe,EAAiBlc,YADC,sBAAhBA,EAC6BA,EACN,aAAhBA,EACsB,WAEA,eAG7BlD,SAAW0D,GAAa,EAAe3E,KAAK2E,EAAW,aAAc,CACvE,IAAM4b,EAAW,YAAmBxhB,KAAKqE,OACnC8G,EAAOqW,EAASpY,oBACjB+B,GAASA,EAAK1D,MAAK,SAAAoB,GAAK,oBAAAA,EAAEU,SAAS/G,KAAK9B,UAC3C,SAAU,IAAK,uBACT,OAAC,KAAU,UAAU,mBAEzB,QAAa,QAAb,EAAS,EAAI,yBAAO,EAAI,QAAK,KAAU,UAAS,mIAetD,OATIkF,IAAc,YAAM5F,KAAK8C,QAAQ8C,UAAWA,KAE9C0b,EAAiB1b,UAAY5F,KAAK8C,QAAQ8C,UAAY,2BACjD5F,KAAK8C,QAAQ8C,WACbA,IAIP5F,KAAK6e,UAAU4C,iBACRzhB,KAAKof,UAAUkC,EAAkB,IAAcD,UAGjD,YAAAK,UAAP,SAGEC,GAHF,WAYQC,EAAkB,2BAClBD,EAAiBtd,MAAQsd,EAAkB,mDAC1C3hB,KAAK8C,SAAO,CACfuB,MAAOrE,KAAKqE,QACTsd,GAAgB,CACnB/b,UAAW,OAAF,IAAE,CAAF,eACJ5F,KAAK8C,QAAQ8C,WACb+b,EAAiB/b,cAEtB,CAMFR,YAAa,aAGTyc,EAAM7hB,KAAK4e,aAAac,kBAItBb,EAAc7e,KAAL,UACX8hB,EAAwBjD,EAAUnS,cACxCmS,EAAUnS,cAAgB,IAAcgV,UACpCE,EAAgBG,6BAClB/hB,KAAKgiB,UAGP,IAAMC,EAAkB,IAAIjgB,IAE5B,OAAOhC,KAAK4e,aAAasD,WACvBL,EACAD,EACA,IAAcF,WACd5d,MAAK,SAAAqe,GA+CL,OA9CA,EAAKvD,aAAakB,YAAY+B,GAE1BhD,EAAUnS,gBAAkB,IAAcgV,YAC5C7C,EAAUnS,cAAgBoV,GAQ5B,EAAKlD,aAAa3T,MAAMmX,MAAM,CAC5BC,OAAQ,SAAApX,GACE,IAAAqX,EAAgBX,EAAL,YACfW,EACFrX,EAAMqX,YAAY,CAChBje,MAAO,EAAKA,MACZuB,UAAW,EAAKA,UAChB2a,mBAAmB,EACnBgC,YAAY,IACX,SAAAxQ,GAAY,OAAAuQ,EAAYvQ,EAAW,CACpCoQ,gBAAiBA,EAAgBxe,KACjCiC,UAAWgc,EAAgBhc,eAS7BqF,EAAMuX,WAAW,CACfne,MAAOud,EAAgBvd,MACvBuB,UAAWgc,EAAgBhc,UAC3BjC,KAAMwe,EAAgBxe,QAK5B8e,eAAgB,SAAAC,GAGdT,EAAgBxf,IAAIigB,EAAMre,UAIvB8d,KAENQ,SAAQ,WAMJV,EAAgB9gB,IAAI,EAAKkD,QAC5Bue,EAAoB,OAQnB,YAAAC,gBAAP,SAIE/f,GAJF,WAUQ2C,EAAezF,KAAK4e,aACvBkE,yBAAyB,CACxBze,MAAOvB,EAAQ4H,SACf9E,UAAW9C,EAAQ8C,UACnBtF,QAASwC,EAAQxC,UAElBpB,UAAU,CACTC,KAAM,SAACiH,GACG,IAAAkc,EAAgBxf,EAAL,YACfwf,GACF,EAAKA,aACH,SAACvQ,EAAU,G,IAAEnM,EAAS,YACpB,OAAA0c,EAAYvQ,EAAU,CACpB3L,iBAAgB,EAChBR,UAAS,QAKnBnG,MAAO,SAACoP,GACF/L,EAAQhD,QACVgD,EAAQhD,QAAQ+O,GAGlB,SAAU,IAAM,6CAA6C,MAMnE,OAFA7O,KAAKsf,cAAc7c,IAAIgD,GAEhB,WACD,E
AAK6Z,cAAchT,OAAO7G,IAC5BA,EAAa5F,gBAKZ,YAAAkjB,WAAP,SACEC,GAEA,OAAOhjB,KAAKof,UAAU4D,IAwBjB,YAAAC,aAAP,SACErd,GAEA,OAAI,YAAM5F,KAAK4F,UAAWA,GAIjB5F,KAAKqd,UAAU/C,KAClBta,KAAKZ,SACLkW,QAAQC,WAGdvV,KAAK8C,QAAQ8C,UAAYA,EAGpB5F,KAAKqd,UAAU/C,KAIbta,KAAKof,UAAU,CAEpBha,YAAapF,KAAK8C,QAAQ8B,mBAC1BgB,UAAS,GACR,IAAcqd,cAPR3N,QAAQC,YAUZ,YAAA+M,YAAP,SACE/F,GAKQ,IAAAqC,EAAiB5e,KAAL,aAQd8gB,EAAYvE,EAPCqC,EAAa3T,MAAMoV,KAAY,CAChDhc,MAAOrE,KAAK8C,QAAQuB,MACpBuB,UAAW5F,KAAK4F,UAChB2a,mBAAmB,EACnBgC,YAAY,IAJA,OAOmB,CAC/B3c,UAAY5F,KAAa4F,YAGvBkb,IACFlC,EAAa3T,MAAMuX,WAAW,CAC5Bne,MAAOrE,KAAK8C,QAAQuB,MACpBV,KAAMmd,EACNlb,UAAW5F,KAAK4F,YAGlBgZ,EAAasE,qBAIV,YAAAC,aAAP,SAAoB5B,GAClBvhB,KAAK8C,QAAQye,aAAeA,EAC5BvhB,KAAKojB,iBAGA,YAAAC,YAAP,WACErjB,KAAK8C,QAAQye,aAAe,EAC5BvhB,KAAKojB,iBAIC,YAAAE,qBAAR,SACEjF,EAKAvb,GAEA,GAAIA,EAAQygB,gBAAiB,CAEzB,MAEEzgB,EAFyB,YAA3BsC,OAAW,IAAG,gBAAa,EAC3B,EACEtC,EAD8B,mBAAhC8B,OAAkB,IAAG,EAAAQ,EAAW,EAGd,YAAhBA,IAE0C,oBAA5BtC,EAAQygB,gBAWxBzgB,EAAQsC,YAActC,EAAQygB,gBAAgBne,EAAa,CACzDiZ,OAAM,EACNvb,QAAO,EACP+B,WAAY7E,KACZ4E,mBAAkB,IAGpB9B,EAAQsC,YADY,sBAAXiZ,EACazZ,EAEA9B,EAAQygB,iBAIlC,OAAOzgB,EAAQsC,aAGT,YAAA8T,MAAR,SACEpW,EACA0gB,GAKA,OADAxjB,KAAK4e,aAAa6E,mBAAmBzjB,MAC9BA,KAAK4e,aAAa8E,qBACvB1jB,KAAKyf,QACL3c,EACA0gB,IAKI,YAAAJ,cAAR,sBAEE,IAAIpjB,KAAK4e,aAAa+E,QAAtB,CAIM,IACJC,EAIE5jB,KAJS,YAETuhB,EAEAvhB,KAFY,qBAIhB,GAAKuhB,GAQL,IAAIqC,GACAA,EAAYC,WAAatC,EAD7B,CAKA,QACE,YAAAA,EACA,kEACA,mBAEWqC,IAAgB5jB,KAAK4jB,YAAc,KAC3CC,SAAWtC,EAEhB,IAAMuC,EAAa,WACb,EAAKF,cACF,YAAyB,EAAK/E,UAAUnS,eAS3CqX,IARA,EAAK3E,UAAU,CAKbha,YAAiD,aAApC,EAAKtC,QAAQ8B,mBAAoC,WAAa,gBAC1E,IAAcmf,MAAMjgB,KAAKigB,EAAMA,KAOlCA,EAAO,WACX,IAAM7Y,EAAO,EAAK0Y,YACd1Y,IACF8Y,aAAa9Y,EAAK+Y,SAClB/Y,EAAK+Y,QAAU/F,WAAW4F,EAAY5Y,EAAK2Y,YAI/CE,UA5CMH,IACFI,aAAaJ,EAAYK,gBAClBjkB,KAAK4jB,eA6CV,YAAAhD,iBAAR,SACEE,EACAlb,GAYA,YAZA,IAAAA,MAAY5F,KAAK4F,WAEjB5F,KAAKmf,KAAI,2BACJnf,KAAKmf,MAAI,CACZ/f,OAAQY,KAAK4e,aAAasF,uBACtBpD,EACA,OAAAjgB,EAAA,GAAUigB,GACdlb,UAAS,IAEN,YAAgBkb,EAAUzhB,gBACtBW,KAAKmf,KAAK1f,MAEZO,KAAKmf,MAGP,YAAAC,UAAP,SACE4D,EACAQ,GAFF,WAIExjB,KAAKuf,YAAa,EAElB,IAAM4E,EAIJX,IAAqB,IAAcnC,SAGnCmC,IAAqB,IAAc9B,WAGnC8B,IAAqB,IAAcO,KAG/BK,EAAepkB,KAAK8C,QAAQ8C,UAC5Bye,EAAiBrkB,KAAK8C,QAAQsC,YAE9Bkf,EAAgB,OAAA3W,EAAA,GAAQ3N,KAAK8C,QAASkgB,GAAc,IACpDlgB,EAAUqhB,EAGZG,EACA,EAAOtkB,KAAK8C,QAASwhB,GAEpBH,IAEHnkB,KAAKojB,gBAKHJ,GACAA,EAAWpd,YACV,YAAMod,EAAWpd,UAAWwe,IAEL,YAAxBthB,EAAQsC,aAGRtC,EAAQsC,cAAgBif,IAExBrkB,KAAKsjB,qBAAqB,oBAAqBxgB,QACtB,IAArB0gB,IACFA,EAAmB,IAAcP,gBAKvC,IAAMrd,EAAY9C,EAAQ8C,WAAS,eAAS9C,EAAQ8C,WAC9C2e,EAAUvkB,KAAKkZ,MAAMpW,EAAS0gB,GAC9B1kB,EAA+C,CACnDK,KAAM,SAAAC,GACJ,EAAKolB,aAAaplB,EAAQwG,IAE5BnG,MAAO,YACL,EAAKglB,YAAY,EAAO7e,KAiB5B,OAbKue,IAGCnkB,KAAKukB,SAAWvkB,KAAKlB,UACvBkB,KAAKukB,QAAQzG,eAAe9d,KAAKlB,UAGnCkB,KAAKukB,QAAUA,EACfvkB,KAAKlB,SAAWA,GAGlBylB,EAAQ1G,YAAY/e,GAEbylB,EAAQlf,SAKT,YAAA2c,QAAR,WACEhiB,KAAKwkB,aAKHxkB,KAAK+f,kBAAiB,GACtB/f,KAAK4F,YAID,YAAA4e,aAAR,SACEplB,EACAwG,GAEA,IAAM8e,EAAY1kB,KAAKihB,gBACnByD,GAAa1kB,KAAK6gB,0BAA0BzhB,EAAQwG,OAClD8e,IAActlB,EAAOohB,SAAWxgB,KAAK8C,QAAQyd,oBAC/CvgB,KAAK4gB,iBAAiBxhB,EAAQwG,GAGhCwX,EAAuBpd,KAAKqd,UAAW,OAAQje,KAI3C,YAAAqlB,YAAR,SACEhlB,EACAmG,GAIA,IAAM+e,EAAc,2BACf3kB,KAAKkgB,iBAAe,CACvBzgB,MAAK,EACLJ,OAAQI,EAAMH,cACdoN,cAAe,IAAcjN,MAC7BwD,SAAS,IAGXjD,KAAK4gB,iBAAiB+D,EAAa/e,GAEnCwX,EAAuBpd,KAAKqd,UAAW,QAASrd,KAAKmf,KAAM1f,MAAQA,IAG9D,YAAAmlB,aAAP,WACE,OAAO5kB,KAAKqd,UAAU/C,KAAO,GAGvB,YAAA+E,cAAR,WACMrf,KAAKuf,aACLvf,KAAKukB,SAAWvkB,KAAKlB,WACvBkB,KAAKukB,QAAQzG,eAAe9d,KAAKlB,iBAC1BkB,KAAKukB,eACLvkB,KAAKlB,UAGdkB,KAAKqjB,cAELrjB,K
AAKsf,cAAc/d,SAAQ,SAAAxC,GAAO,OAAAA,EAAA,iBAClCiB,KAAKsf,cAAcrT,QACnBjM,KAAK4e,aAAaiG,UAAU7kB,KAAKyf,SACjCzf,KAAKqd,UAAUpR,QACfjM,KAAKuf,YAAa,IAEtB,EAl2BA,CAGU,KA42BJ,SAAUqD,EACdkC,GAEM,MAAmCA,EAAShiB,QAA1CsC,EAAW,cAAE,EAAe,kBAEpC,MACkB,sBAAhBA,GACgB,iBAAhBA,EAEO0f,EAAS1F,UAAU,CACxBha,YAAa,cAGbme,gBAAA,WAME,OAHAvjB,KAAKujB,gBAAkB,EAGQ,oBAApB,EACF,EAAgBre,MAAMlF,KAAMmF,WAG9BC,KAKN0f,EAAS1F,YAGlB,SAASH,EAAyCxf,GAChD,SAAU,IAAM,wBAAyB,EAAS,QAAW,EAAE,OAG3D,SAAUihB,EACdC,GAEIze,SAAWye,GACb,SAAU,IAAM,sCACV,OAAC,KAAU,UACbA,IAAS,GAlDjBnD,EAAsB,G,wCC51BtB,aAME,WAAY,G,IACVvS,EAAK,QACL/K,EAAM,SACN6kB,EAAS,YACTC,EAAe,kBAEfhlB,KAAKiL,MAAQA,EAET/K,IACFF,KAAKE,OAASA,GAGZ6kB,GACF/kB,KAAKilB,aAAaF,GAGhBC,GACFhlB,KAAKklB,mBAAmBF,GAuW9B,OAnWS,YAAAC,aAAP,SAAoBF,GAApB,WACE/kB,KAAK+kB,UAAY/kB,KAAK+kB,WAAa,GAC/B/Y,MAAMY,QAAQmY,GAChBA,EAAUxjB,SAAQ,SAAA4jB,GAChB,EAAKJ,UAAY,OAAAK,EAAA,GAAU,EAAKL,UAAWI,MAG7CnlB,KAAK+kB,UAAY,OAAAK,EAAA,GAAUplB,KAAK+kB,UAAWA,IAIxC,YAAAM,aAAP,SAAoBN,GAClB/kB,KAAK+kB,UAAY,GACjB/kB,KAAKilB,aAAaF,IAGb,YAAAO,aAAP,WACE,OAAOtlB,KAAK+kB,WAAa,IAOd,YAAAQ,aAAb,SAAiC,G,IAC/B7a,EAAQ,WACR8a,EAAY,eACZllB,EAAO,UACPsF,EAAS,YACT,IAAA6f,8BAAsB,IAAG,GAAK,E,uFAQ9B,OAAI/a,EACF,GAAO1K,KAAK0lB,gBACVhb,EACA8a,EAAa7hB,KACbrD,EACAsF,EACA5F,KAAKglB,gBACLS,GACA3hB,MAAK,SAAA6hB,GAAe,kCACjBH,GAAY,CACf7hB,KAAMgiB,EAFc,aAMxB,GAAOH,UAGF,YAAAN,mBAAP,SAA0BF,GACxBhlB,KAAKglB,gBAAkBA,GAGlB,YAAAY,mBAAP,WACE,OAAO5lB,KAAKglB,iBAKP,YAAAa,YAAP,SAAmBnb,GACjB,OAAI,YAAc,CAAC,UAAWA,IACxB1K,KAAK+kB,UACAra,EAGJ,MAIF,YAAAob,YAAP,SAAmBpb,GACjB,OAAO,YAA6BA,IAG/B,YAAAqb,eAAP,SAAsBzlB,GACZ,IAAA2K,EAAUjL,KAAL,MACb,kCACKM,GAAO,CACV2K,MAAK,EAEL+a,YAAA,SAAY/jB,GACV,OAAOgJ,EAAMgb,SAAShkB,OAQf,YAAAikB,qBAAb,SACExb,EACA9E,EACAtF,G,YADA,IAAAsF,MAAA,SACA,IAAAtF,MAAA,I,gFAEA,OAAIoK,EACF,GAAO1K,KAAK0lB,gBACVhb,EACA1K,KAAKmmB,wBAAwBzb,EAAU9E,IAAc,GACrD5F,KAAK+lB,eAAezlB,GACpBsF,GACA9B,MAAK,SAAAH,GAAQ,kCACViC,GACAjC,EAFU,uBAMjB,kBACKiC,WAIA,YAAAwgB,qBAAP,SAA4B1b,GAC1B,IAAI2b,GAAiB,EAkBrB,OAjBA,YAAM3b,EAAU,CACd9B,UAAW,CACTV,MAAK,SAACC,GACJ,GAAwB,WAApBA,EAAK3F,KAAK9B,OAAsByH,EAAKhD,YACvCkhB,EAAiBle,EAAKhD,UAAUsC,MAC9B,SAAAc,GACE,MAAmB,WAAnBA,EAAI/F,KAAK9B,OACU,iBAAnB6H,EAAI7H,MAAMkG,OACU,IAApB2B,EAAI7H,MAAMA,UAGZ,OAAO,QAMV2lB,GAID,YAAAF,wBAAR,SACEzb,EACA9E,GAEA,OAAO5F,KAAKiL,MAAMoV,KAAK,CACrBhc,MAAO,YAA2BqG,GAClC9E,UAAS,EACT2a,mBAAmB,EACnBgC,YAAY,IACXnjB,QAGS,YAAAsmB,gBAAd,SACEhb,EACA4b,EACAhmB,EACAsF,EACAof,EACAS,G,YAHA,IAAAnlB,MAAA,SACA,IAAAsF,MAAA,SACA,IAAAof,MAAA,WAAyC,gBACzC,IAAAS,OAAA,G,sGA6BA,OA3BMc,EAAiB,YAAkB7b,GACnC0C,EAAY,YAAuB1C,GACnC3D,EAAc,YAAkBqG,GAEhCoZ,EAAuBD,EAC1B3nB,UAEG6nB,EAAuBD,EACzBA,EAAoBE,OAAO,GAAGC,cAC9BH,EAAoBnlB,MAAM,GAC1B,QAEI4J,GAAF,EAAoBjL,MAAb,MAAEE,EAAM,SACf0mB,EAA2B,CAC/B7f,YAAW,EACXzG,QAAS,OAAF,IAAE,CAAF,eACFA,GAAO,CACV2K,MAAK,EACL/K,OAAM,IAER0F,UAAS,EACTof,gBAAe,EACfyB,qBAAoB,EACpBI,kBAAmB,GACnBpB,uBAAsB,GAGxB,GAAOzlB,KAAK8mB,oBACVP,EAAevf,aACfsf,EACAM,GACA9iB,MAAK,SAAA1E,GAAU,MAAC,CAChBA,OAAM,EACNynB,kBAAmBD,EAAYC,8BAIrB,YAAAC,oBAAd,SACE9f,EACAsf,EACAM,G,4GA+CA,OA7CQ7f,EAAoC6f,EAAzB,YAAEtmB,EAAuBsmB,EAAhB,QAAEhhB,EAAcghB,EAAL,UACjCG,EAA0B,CAACT,GAE3B/iB,EAAU,SAAO4D,GAAwB,+C,6CAC7C,OAAK,YAAcA,EAAWvB,GAK1B,YAAQuB,GACV,GAAOnH,KAAKgnB,aAAa7f,EAAWmf,EAAWM,GAAa9iB,MAC1D,SAAAmjB,G,MAC6B,qBAAhBA,GACTF,EAAeve,OAAK,MACjB,YAAuBrB,IAAa8f,EAC7B,SAQd,YAAiB9f,GACnBsG,EAAWtG,GAGXsG,EAAW1G,EAAYI,EAAU3E,KAAK9B,OACtC,QAAU,YAAU,uBAAqB,OAAU,EAAU,KAAI,2BAG/D+M,GAAYA,EAASyZ,gBACjBA,EAAgBzZ,EAASyZ,cAAc1kB,KAAK9B,MAC9CkmB,EAAY5B,gBAAgBsB,EAAWY,EAAe5mB,IACxD,GAAON,KAAK8mB,oBACVrZ,EAASzG,aACTsf,EACAM,GACA9iB,MAAK,SAAAqjB,GACLJ,EAAeve,KAAK2e,O,K
AjCxB,WAuCJ,GAAO7R,QAAQ8R,IAAIpgB,EAAaC,WAAWuJ,IAAIjN,IAAUO,MAAK,WAC5D,OAAO,OAAAshB,EAAA,GAAe2B,cAIZ,YAAAC,aAAd,SACE9c,EACAoc,EACAM,G,oHAqCA,OAnCQhhB,EAAcghB,EAAL,UACXvV,EAAYnH,EAAM1H,KAAK9B,MACvB2mB,EAAmB,YAAuBnd,GAC1Cod,EAAYjW,IAAcgW,EAC1BE,EAAgBjB,EAAUe,IAAqBf,EAAUjV,GAC3DmW,EAAgBlS,QAAQC,QAAQgS,GAOjCX,EAAYnB,yBACbzlB,KAAKomB,qBAAqBlc,KAEpBud,EACJnB,EAAU9T,YAAcoU,EAAYH,sBAChCiB,EAAc1nB,KAAK+kB,WAAa/kB,KAAK+kB,UAAU0C,MAE7ClS,EAAUmS,EAAYJ,EAAYjW,EAAYgW,MAElDG,EAAgBlS,QAAQC,QAGtB,IAAUoS,UAAU3nB,KAAKiL,MAAOsK,EAAS,CACvC+Q,EACA,YAAyBpc,EAAOtE,GAChCghB,EAAYtmB,QACZ,CAAE4J,MAAK,EAAEnD,YAAa6f,EAAY7f,kBAO5C,GAAOygB,EAAc1jB,MAAK,SAAC1E,GAgBzB,YAhByB,IAAAA,MAAA,GAGrB8K,EAAM3C,YACR2C,EAAM3C,WAAWhG,SAAQ,SAAAiG,GACM,WAAzBA,EAAUhF,KAAK9B,OAAsB8G,EAAUrC,WACjDqC,EAAUrC,UAAU5D,SAAQ,SAAAgH,GACH,OAAnBA,EAAI/F,KAAK9B,OAAqC,gBAAnB6H,EAAI7H,MAAMkG,OACvCggB,EAAYC,kBAAkBte,EAAI7H,MAAMA,OAAStB,SAQtD8K,EAAMlD,aAMG,MAAV5H,EAEKA,EAGL4M,MAAMY,QAAQxN,GACT,EAAKwoB,wBAAwB1d,EAAO9K,EAAQwnB,GAIjD1c,EAAMlD,aACD,EAAK8f,oBACV5c,EAAMlD,aACN5H,EACAwnB,QAJJ,EAfSxnB,aAyBL,YAAAwoB,wBAAR,SACE1d,EACA9K,EACAwnB,GAHF,WAKE,OAAOtR,QAAQ8R,IACbhoB,EAAOoR,KAAI,SAAAqX,GACT,OAAa,OAATA,EACK,KAIL7b,MAAMY,QAAQib,GACT,EAAKD,wBAAwB1d,EAAO2d,EAAMjB,GAI/C1c,EAAMlD,aACD,EAAK8f,oBAAoB5c,EAAMlD,aAAc6gB,EAAMjB,QAD5D,OAMR,EA9XA,GCtCMkB,EAA0B,IAC9BrS,EAAA,EAAgB1K,QAAU7J,KAG5B,SAAS6mB,EACP9c,EACA+c,GAEA,IAAMC,EAAWhd,EAAM+c,GACC,oBAAbC,IACThd,EAAM+c,GAAc,WASlB,OARAF,EAAwBxmB,IACtB2J,GAKC6c,EAAwB1mB,IAAI6J,GAAU,GAAK,MAEvCgd,EAAS/iB,MAAMlF,KAAMmF,aAKlC,SAAS+iB,EAAoBhd,GACvBA,EAAI,gBACN8Y,aAAa9Y,EAAI,eACjBA,EAAI,mBAAoB,GAgB5B,iBAaE,WACE0T,EACgBa,QAAA,IAAAA,MAAUb,EAAac,mBAAvB,KAAAD,UAdlB,KAAA9T,UAAY,IAAI3J,IAChB,KAAA0I,SAAgC,KAChC,KAAAyd,cAAgB,EAChB,KAAA7I,cAAgB,IAAItd,IAKpB,KAAAomB,SAAU,EA+DF,KAAAvc,OAAiB,EAkET,KAAAwc,gBAA+C,KAzH7D,IAAMpd,EAAQjL,KAAKiL,MAAQ2T,EAAa3T,MAOnC6c,EAAwB3mB,IAAI8J,KAC/B6c,EAAwBxmB,IAAI2J,EAAO,GACnC8c,EAA2B9c,EAAO,SAClC8c,EAA2B9c,EAAO,UAClC8c,EAA2B9c,EAAO,UAuZxC,OAnZS,YAAAqd,KAAP,SAAYjkB,GAUV,IAAIqI,EAAgBrI,EAAMqI,eAAiB,IAAczJ,QA2BzD,OA1BIjD,KAAK4F,WACL5F,KAAK0M,gBAAkB,IAAczJ,UACpC,YAAMjD,KAAK4F,UAAWvB,EAAMuB,aAC/B8G,EAAgB,IAAcuW,cAG3B,YAAM5e,EAAMuB,UAAW5F,KAAK4F,aAC/B5F,KAAKuoB,cAAW,GAGlBhoB,OAAOC,OAAOR,KAAM,CAClB0K,SAAUrG,EAAMqG,SAChB9E,UAAWvB,EAAMuB,UACjBjG,aAAc,KACdL,cAAeU,KAAKV,eAAiB,GACrCoN,cAAa,IAGXrI,EAAMgkB,iBACRroB,KAAKyjB,mBAAmBpf,EAAMgkB,iBAG5BhkB,EAAM8jB,gBACRnoB,KAAKmoB,cAAgB9jB,EAAM8jB,eAGtBnoB,MAOT,YAAAkE,MAAA,WACEgkB,EAAoBloB,MACpBA,KAAK6L,OAAQ,GAGf,YAAAyU,QAAA,SAAQ1a,QAAA,IAAAA,MAAY5F,KAAK4F,WACvB,IAAM9C,EAAU9C,KAAKwoB,eAAe5iB,GAEpC,GAAI5F,KAAKuoB,UAAY,YAAMzlB,EAAS9C,KAAKuoB,SAASzlB,SAChD,OAAO9C,KAAKuoB,SAASlI,KAGvBrgB,KAAKyoB,YAAYzoB,KAAK4F,UAAYA,GAElC,IAAM8iB,EAAK1oB,KAAKqoB,gBAChB,GAAIK,GAAiC,aAA3BA,EAAG5lB,QAAQsC,YACnB,MAAO,CAAE1F,UAAU,GAGrB,IAAM2gB,EAAOrgB,KAAKiL,MAAMoV,KAAKvd,GAE7B,OADA9C,KAAK2oB,eAAetI,EAAMvd,GACnBud,GAQD,YAAAsI,eAAR,SACEtI,EACAvd,GAEA9C,KAAKuoB,SAAWlI,EAAO,CACrBA,KAAI,EACJvd,QAASA,GAAW9C,KAAKwoB,uBACvB,GAGE,YAAAA,eAAR,SAAuB5iB,G,MACrB,YADqB,IAAAA,MAAY5F,KAAK4F,WAC/B,CACLvB,MAAOrE,KAAK0K,SACZ9E,UAAS,EACT2a,mBAAmB,EACnBgC,YAAY,EACZqG,gBAAqC,QAApB,EAAA5oB,KAAKqoB,uBAAe,eAAEvlB,QAAQ8lB,kBAInD,YAAAC,QAAA,SAAQxI,GAAR,WACQyI,EAAU9oB,KAAKuoB,UAAYvoB,KAAKuoB,SAASlI,KAC/CrgB,KAAK2oB,eAAetI,GACfrgB,KAAK6L,OACL,YAAMid,GAAWA,EAAQ1pB,OACnBihB,GAAQA,EAAKjhB,UACtBY,KAAK6L,OAAQ,EACR7L,KAAK+oB,gBACR/oB,KAAK+oB,cAAgB7K,YAAW,WAAM,oBAAe,MAQ3D,YAAAuF,mBAAA,SAAmBiF,GAAnB,WACMA,IAAO1oB,KAAKqoB,kBAEZroB,KAAKgpB,YACPhpB,KAAK2L,UAAUW,OAAOtM,KAAKgpB,YAG5BhpB,KAAaqoB,gBAAkBK,EAE5BA,GACFA,EAAE,UAAgB1oB,KAClBA,KAAK2L,UAAU
lJ,IAAIzC,KAAKgpB,WAAa,WACtB,EAAK1I,UACT2I,0BAMPP,EAAE,UAUF9F,EAAoB8F,aAIjB1oB,KAAKgpB,aAIhB,YAAA/K,OAAA,sBACEiK,EAAoBloB,MAEhBA,KAAKkpB,gBACPlpB,KAAK2L,UAAUpK,SAAQ,SAAA2K,GAAY,OAAAA,EAAA,MAGrClM,KAAK6L,OAAQ,GAGP,YAAAqd,aAAR,WACE,IAAKlpB,KAAK6L,QAAU7L,KAAK2L,UAAU2O,KACjC,OAAO,EAGT,GAAI,YAAyBta,KAAK0M,gBAC9B1M,KAAKqoB,gBAAiB,CAChB,IAAAjjB,EAAgBpF,KAAKqoB,gBAAgBvlB,QAA1B,YACnB,GAAoB,eAAhBsC,GACgB,sBAAhBA,EACF,OAAO,EAIX,OAAO,GAGF,YAAA+jB,KAAP,WACE,IAAKnpB,KAAKooB,QAAS,CACjBpoB,KAAKooB,SAAU,EAGfpoB,KAAKkE,QAELlE,KAAKoe,SAGLpe,KAAKoe,OAASgL,EAAUxoB,UAAUwd,OAElCpe,KAAKsf,cAAc/d,SAAQ,SAAAxC,GAAO,OAAAA,EAAA,iBAElC,IAAM2pB,EAAK1oB,KAAKqoB,gBACZK,GAAIA,EAAGrF,gBAMP,YAAAjF,OAAR,aAIQ,YAAAqK,YAAR,SAAoB7iB,GAApB,gBAAoB,IAAAA,MAAY5F,KAAK4F,WACnC,IAAM8iB,EAAK1oB,KAAKqoB,gBAChB,IAAIK,GAAiC,aAA3BA,EAAG5lB,QAAQsC,YAArB,CAIA,IAAMikB,EAAY,2BAIbrpB,KAAKwoB,eAAe5iB,IAAU,CACjC0jB,QAAStpB,KACT4c,SAAU,SAAAyD,GAAQ,SAAKwI,QAAL,MAGf7oB,KAAKupB,WACL,YAAMF,EAAcrpB,KAAKupB,aAC5BvpB,KAAKoe,SACLpe,KAAKoe,OAASpe,KAAKiL,MAAMyX,MAAM1iB,KAAKupB,UAAYF,MAU7C,YAAA5H,eAAP,WACEzhB,KAAKwpB,eAAY,GAGX,YAAAC,YAAR,SACErqB,EACAwG,GAEQ,IAAA4jB,EAAcxpB,KAAL,UACjB,QACEwpB,GAIAA,EAAUE,UAAY5B,EAAwB1mB,IAAIpB,KAAKiL,QACvD,YAAMrF,EAAW4jB,EAAU5jB,YAC3B,YAAMxG,EAAOuE,KAAM6lB,EAAUpqB,OAAOuE,QAIjC,YAAAgmB,WAAP,SACEvqB,EACAsL,EACA5H,EAIA8mB,GAPF,WASQtqB,EAAgB,YAAgBF,EAAOC,QACzCD,EAAOC,OAAOgC,MAAM,GACpB,GAMJ,GAFArB,KAAKkE,QAED,gBAAiB9E,GAAU,YAAgBA,EAAOyqB,aAAc,CAClE,IAAI,EAAa7pB,KAAKsgB,UAAUlhB,OAC1B,EAAS,IAAI,IACnBA,EAAOyqB,YAAYtoB,SAAQ,SAAC,GAC1B,I,IAD4BoC,EAAI,OAAEmmB,EAAI,OAAEzqB,EAAM,SACrCoC,EAAIqoB,EAAK/lB,OAAS,EAAGtC,GAAK,IAAKA,EAAG,CACzC,IAAMI,EAAMioB,EAAKroB,GAEX,GADgBsoB,OAAOloB,GAC+B,GAAK,GACjE,EAAOA,GAAO8B,EACdA,EAAO,EAELtE,GACFC,EAAckJ,KAAI,MAAlBlJ,EAAsBD,GAExB,EAAa,EAAO2qB,MAAM,EAAYrmB,MAExCvE,EAAOuE,KAAO,EAGhB3D,KAAKV,cAAgBA,EAEO,aAAxBwD,EAAQsC,YACVpF,KAAK2oB,eACH,CAAEvpB,OAAQA,EAAOuE,KAAMjE,UAAU,GACjCM,KAAKwoB,eAAe1lB,EAAQ8C,YAGE,IAAvBgkB,IACLK,EAAkB7qB,EAAQ0D,EAAQonB,aAKpClqB,KAAKiL,MAAMkf,oBAAmB,SAAAlf,GAC5B,GAAI,EAAKwe,YAAYrqB,EAAQ0D,EAAQ8C,WACnCqF,EAAMuX,WAAW,CACfne,MAAOqG,EACP/G,KAAMvE,EAAOuE,KACbiC,UAAW9C,EAAQ8C,UACnBwkB,UAAkC,IAAvBR,IAGb,EAAKJ,UAAY,CACfpqB,OAAM,EACNwG,UAAW9C,EAAQ8C,UACnB8jB,QAAS5B,EAAwB1mB,IAAI,EAAK6J,aAmC5C,GAAI,EAAKsd,UACL,EAAKA,SAASlI,KAAK3gB,SAIrB,YADAN,EAAOuE,KAAO,EAAK4kB,SAASlI,KAAKjhB,QAOrC,IAAMirB,EAAc,EAAK7B,eAAe1lB,EAAQ8C,WAC1Cya,EAAOpV,EAAMoV,KAAQgK,GAKtB,EAAKjC,SAGR,EAAKK,YAAY3lB,EAAQ8C,WAQ3B,EAAK+iB,eAAetI,EAAMgK,GACtBhK,EAAK3gB,WACPN,EAAOuE,KAAO0c,EAAKjhB,WAIvBY,KAAKwpB,eAAY,IAKhB,YAAAc,UAAP,WAEE,OADAtqB,KAAKL,aAAe,KACbK,KAAK0M,cAAgB,IAAcyT,OAGrC,YAAAoK,UAAP,SAAiB9qB,GAcf,OAbAO,KAAK0M,cAAgB,IAAcjN,MACnCO,KAAKwpB,eAAY,EAEjBxpB,KAAKkE,QAEDzE,EAAMH,gBACRU,KAAKV,cAAgBG,EAAMH,eAGzBG,EAAME,eACRK,KAAKL,aAAeF,EAAME,cAGrBF,GAEX,EAnbA,GAqbM,SAAUwqB,EACd7qB,EACA8qB,QAAA,IAAAA,MAAA,QAEA,IAAMM,EACY,WAAhBN,GACgB,QAAhBA,EACEO,GAAmBtN,EAAsB/d,GAI7C,OAHKqrB,GAAmBD,GAAgBprB,EAAOuE,OAC7C8mB,GAAkB,GAEbA,ECldD,MAAmBlqB,OAAOK,UAAZ,eAuBtB,aAyBE,WAAY,G,IACVqK,EAAK,QACLtM,EAAI,OACJoU,EAAc,iBACd,IAAA2X,0BAAkB,IAAG,GAAK,EAC1BC,EAAW,cACX,IAAAhH,eAAO,IAAG,GAAK,EACf,IAAA1J,uBAAe,IAAG,KAAE,EACpB2Q,EAAU,aACV1G,EAAsB,yBAzBhB,KAAAjK,gBAA0C,GAU1C,KAAA4Q,QAAU,IAAI3pB,IAId,KAAA4pB,eAAiB,IAAI5pB,IAicrB,KAAA6pB,eAAiB,IACvBtV,EAAA,EAAgB1K,QAAU7J,KAiIpB,KAAA8pB,eAAiB,EAKjB,KAAAC,iBAAmB,EAKnB,KAAAC,kBAAoB,EA6PpB,KAAAC,wBAA0B,IAAIjqB,IAnzBpClB,KAAKiL,MAAQA,EACbjL,KAAKrB,KAAOA,EACZqB,KAAK+S,eAAiBA,GAAkBxS,OAAOmB,OAAO,MACtD1B,KAAK0qB,mBAAqBA,EAC1B1qB,KAAKia,gBAAkBA,EACvBja,KAAK4qB,WAAaA,GAAc,IAAI,EAAW,CAAE3f,MAAK,IACtDjL,KAAK2jB,QAAUA,EACf3jB
,KAAKkkB,yBAA2BA,GAC3BlkB,KAAK2qB,YAAcA,KACtB3qB,KAAKorB,cAAgB7qB,OAAOmB,OAAO,OAq3CxC,OA72CQ,YAAAynB,KAAP,sBACEnpB,KAAK6qB,QAAQtpB,SAAQ,SAAC8pB,EAAO5L,GAC3B,EAAK6L,qBAAqB7L,MAG5Bzf,KAAKurB,qBACH,QAAI,QAAe,kDACnB,cAGI,YAAAA,qBAAR,SAA6B9rB,GAC3BO,KAAK8qB,eAAevpB,SAAQ,SAAA6c,GAAU,OAAAA,EAAA,MACtCpe,KAAK8qB,eAAe7e,SAGT,YAAApI,OAAb,SAKE,G,QACAhB,EAAQ,WACR+C,EAAS,YACT4lB,EAAkB,qBAClBC,EAAa,gBACb,IAAAC,sBAAc,IAAG,KAAE,EACnB,IAAAC,2BAAmB,IAAG,GAAK,EACnBC,EAAiB,SACzBC,EAAc,iBACd,IAAAzmB,mBAAW,IAAG,GAA0B,QAA1B,EAAApF,KAAK+S,eAAelP,cAAM,eAAEuB,cAAe,eAAc,EACvE,IAAA8kB,mBAAW,IAAG,GAA0B,QAA1B,EAAAlqB,KAAK+S,eAAelP,cAAM,eAAEqmB,cAAe,OAAM,EAC/D4B,EAAc,iBACdxrB,EAAO,U,8HAuBO,OArBd,QACE,YACA,iGACA,kBAEF,QACE,YAAgB,iBAAhB8E,GACgB,aAAhBA,EACA,8MACA,gCAEsB,aAAlB,EAAkB,IAElB,EAGG,KAFP,qBAGF,OAAQ,UAAQ,GAAM,EAAiB,EAAAsF,SAAU,qBAEjD,EAAS,KAAO,MAAC,iBAAqB,GAClC,sBAAgB,GACN,E,gDAAsB,MAAlC,O,qBAGI,O,OAEJ,EAAM,KAAc,gBAClB,mBAAQ,IACR,SAAS,EACT,UAAS,EACT,SAAO,EACgB,aAGzB,GAME,4BAAU,GACV,WAAU,EACV,SAAS,EACT,UAAW,EACX,YAAW,EACX,YAAO,EACP,UACA,cAAQ,EACR,SACC,mBAKC,KAAI,mBAEV,O,CACE,MAAO,SAAQ,SACR,KAWH,SAAI,wBAAiC,EAAW,YAAK,YAAQ,iD,GAC3D,EAAsB,eACpB,cAAe,CACd,yBAIHqhB,IACAA,EAAmB,SAAQ,EAC5B,cAID,IAAI,EAAO,YAAc,GAAK,GAe5B,MAdiB,oBAAjBL,IACD,QAGG,cACF,EAAmB,WACpB,SAQC,qBAAU,CACV,WAAQ,EACR,OAAQ,EACR,SAAS,EACT,UAAW,EACX,YAAW,EACX,YAAO,EACP,QAAQ,EACRrJ,OAAA,EACA,gBACA,oBAAc,EACd,eAAgB,EAChB,iBAAc,WACd,eAAc,EACb,sBAIL,UAAI,C,KACF,SAAK,GAQL,qBACD,MAGC,eAAI,GACF0J,IACAA,EAAmB,SAAQ,EAC5B,WAGC,GACD,4BAID,qBAEI,eAAiB,eAEnB,iCAMV,YAiKC,iCA3IC,gBAEiB,IAAX,IAAM,EAAc,YAC1B,IAAM,IAAuC,OACvC,EAAY,GAEd,EAAgC,aAAtB,EAAI,Y,IAChB,GAAY,EAAK,kBACf,EAAQ,MACR,OAAQ,OACR,OAAO,gBACP,QAAW,SACV,wBAGH,IAAI,EAAe,gBACjB,G,KAAwB,0BAAe,KACrC,IAAM,EAAY,EAAA1D,gBACd,EAAU,GAAoB,EAAK,U,GACrC,GAAO,Y,CAGH,MAA8B,EAA5B,GAGF,EAA2C,EAAM,QAAY,kCACjE,EAAK,EAAE,KAAS,CAChB,QACA,YACA,mBAAiB,EAJH,gBAOZ,EAAY,S,GAAoB,YAE5B,EAA0B,C,IAC9B,EAAgB,EAAM,GACtB,eAAW,EACX,aAAgB,YAAU,WACzB,mBAID,GACE,EAAQ,MACR,OAAQ,EACR,OAAO,aACP,QACC,mB,GASX,EAAS,UACTxlB,EAAS,gBACTA,EAAS,QACTA,EAAS,gBAET,EAAM,iBAAoB,CAE1B,IAAI,EAAC,G,GACH,oBAAa,CACX,YAAK,SAAW,GACd,GACD,4CAMD,IAAIwf,EAAQ,S,GACV,EAAK,C,IAKH,EAAa,C,IACX,EAAI,QAIJ,mBACA,QAAW,UAAS,EAAS,kBAC7B,UAAU,EAAO,UACjB,cACC,uBAGD,aAAiCjjB,ECvbKyqB,cDwbvC,kDAID,IAAS,EAAS,CAClB,QAAS,EAAE,QACV,wBAMH,GAAa,kBACX,EAAE,OAAE,CACJ,mB,OAAgB,SAAS,KACvB,MAAgB,YAAK,EAAe,EAAM,OAC3C,+BAQP,UAAiB,eAIjB,cAMA,iBAAgBhnB,EAAS,iBAExB,eAAQ,EAAM,gBAAI,OAErB,SAAI,SAAS,UAAuB,EAAS,WAI3C,EAAO,qBAAqB,EAAK,eAClC,mDAIJ,2BAED,YA4BC,qCAdC,IAAM,EAAO,KACX,EAA8B,oBAA5B2oB,EACAA,EAAmB,aAEvB,EACE,YAAI,+C,IAKH,yEACC,SACD,yBAEJ,eAOC,EAAO,UAAKtJ,WAAoB,SAE9B,EACA,EAAAxV,GAEH,iDAGC,EAAW,UAA2C,cAAa,WACnE,IAAI,EAAQ,OAAC,OAAQ,MAStB,OARG,aAAM,SAAW,cACf,KAAW,CACX,YAAe,UACf,cAAcxB,EAAK,cACnB,aAAa,EAAM,aACnB,kCAGL,GAGC,EAAM,UAAY,YAAiB,SAAS,GAC5C,IAAI2T,EAAW,oBACbA,IACAA,EAAU,kBAAa,EACxB,qBAQO,YAAc,mBAAU,GAEhC,IAAI,EAAgB,KAAI,e,IACtB,EAAiB,IAAG,GAAW,CAC/B,IAAM,EAAY,KAAG,2BACf,EAAc,YAAuC,GACrD,EAAc,gBAAgB,YAAK,GAEnC,EAAU,GAAwB,+BACtC,EAAU,CAGV,WACA,iBAAkB,YAAiB,GACnC,mBAAW,wCACX,YAAW,EACX,YAAa,EAKb,YAAS,OAAF,IAAE,CAAF,YAEL,I,QACE,YAAQ,YAAS,OAAqB,2C,MACrB,wBAAbjV,EAAI,MACN,sBACD,mDAGJ,QAID,EAAI,SAAQ,GACV,IAAAmhB,EAAsB,IAAE,IACzB,YAMHtoB,EAAI,GACJA,EAAI,GACJA,EAAI,GACL,KAGF,iBAMC,yBACU,SAAUiI,EAAU,GAG/B,qEAMC,EAAO,qBACF,SAAO,GAQkC,qBAD9C,EAAI,YAAe,6BAAgC,kBAAa,wBACtDqX,8BACT,kCAGD,IAAM,EAAU,IAAG,EAAI,MACrB,EAAY,IAAE,EAAI,CAClB,aAAS,KACT,UAAO,EACN,YAWJ,OAPC,aAAU,IAAK,aACb,OAAU,CACV,SAAAld,EAAiB,MACjB,gBAAWA,EACV,wBAGJ,GAED,YA6BC,oBA3BC,WA2BD,YAxBG,IADF,IAEE,0BACE,sBACF,+FAGkB,YAAK,EACvB,UAGF,QACE,YACA,aAD
kB,aAClB,kDACA,0CAEF,QACE,aAAkB,oBAClB,0DACA,qCAEF,QAAO,aAAK,EACV,aAEA,qDAAsC,aAAC,mBAC1C,mEAIC,EAAO,UAAY,gBAAkB,WACtC,sCAIC,EAAO,UAAK,kBAAmB,WAChC,gCAIC,EAAO,UAAY,mBAAqB,WACzC,yCAGC,EAAK,UAAAmnB,iBAA4B,SAAS,GAC1ChsB,KAAK,4BAAmB,GACzB,yBAGC,EAAM,UAAY,4BAA0B,YAC5C,IAAI6e,EAAS,oBAAEA,GAChB,UAEiB,gCAAA/b,GA4BjB,YA3BC,QAAoB,GACrB,oBAUC9C,KAAK,qBAAgB,YAAS,wFAC5B,aAAI,SAAU,YAGZ6e,EAAU,gBACX,4BAEA,YAID7e,KAAKorB,gBACN,wCAIF,qBAED,YAuFC,iCAtFC,gBAEgB,IAAV,IAAkD,YACxD,IAAM,UACA,EAAkB,IAAG,IAEvB,EAAqB,IAAG,IAgF7B,OA/EG,cAAQ,IACN,EAAI,SAAO,SAAS,GAClB,oBACD,YACC,YAAkB,GACnB,kCACC,YAAmB,IAAU,SAC9B,Y,KAIoC,QAAE,oBAAE,GAC3C,IAAI1C,EAAI,+B,GACN,EAAI,C,GACU,QAAZ,EAED,YADC,EAAO,SAQT,IACE,EAAW,Y,GACE,YADY,uBAGlB,WAAP,IAAO,iBACR,QAIW,WAAV,GACC,GAAYuD,EAAkB9qB,IAAI,IAEnC,GAAY,EAAa,UACzB,EAAI,MAAS,GAAEye,GACX,EAAQ,UAAE,GACf,iBAKHsM,EAAmB,MAIjB,EAAgB,kBAAa,GAC7B,IAAM,EAAS,YAAQ,sBACrB,EAAU,EAAQ,SAAK,SACvB,SAAS,EAAS,MACjB,wBAED,QAAkB,CAClB,aAAS,EACT,UAAS,EAIR,sEAEH,YAAU,eACV,EAAQ,mBAAiB,GACxB,cAIH,SAAAD,EAA0B,MACxB,EAAa,SAAE,cACb,GAKD,qKAKN,GAED,YAqBC,qCApBC,gBAEM,QAAgE,MAEtE,IAAI,EACF,GAeH,OAbW,0BAAgB,EAAgB,MAArB,UAA6B,uBAChD,MAAgB,EAAkB,QAAC,YACnC,EAAI,oBACA,GACY,YAAX7mB,GACH,mBACD,oBAEA,+BAIH,wBACD,gBAGC,EAAK,UAAS,mBAAyB,YACxC,gDAED,YA0DC,qC,IAzDC,EAAK,KAMLf,EAAY,EAAC,MAAS,EAAQ,EAAS,wEACvC,OAAY,UAAK,GAAa,SAE9B,EAAM,kBAAiB,EAACuB,G,IACtB,EAAK,YAKH,SAAI,sBAA0B,EAAE,sB,GAG1B,aAAJ,IACE,EAAiB,MACf,QAAK,OACL,MAAM,EACN,OAAQ,OACR,2BACC,cAIN,sBAGC,EAAsB,GACpB,cAAe,CACd,yBAIL,a,GAGF,KAAM,8BAAyB,CAM/B,MAAsC,gBAAQ,oCAC5C,WAAO,KAAuC,YAC9C,WAKC,OADD,EAAO,MAAM,SAAO,GAAP,OAAyB,4BACrC,yCAIN,aAGC,EAAK,UAAAif,UAAqB,SAAS,GACnC7kB,KAAK,qBAAmB,GACzB,yBAGC,EAAK,+BAAqC,YAC1CA,KAAK,4BAAqB,GAC3B,qBAQC,EAAK,UAAc,YAAQ,SAAS,GACpC,KAAI,eAAgB,OAACyf,GACnBzf,KAAK,QAAQ,IAACyf,KACdzf,KAAK,SAAQ,GAAO,OACrB,yBAID,EAAS,2BAAW,WAAEA,KAAK2qB,aACvB,KAAC,cACN,wDAGC,EAAO,UAAK,cAAW,WACxB,wBAOD,YA0EC,wC,IAtEC,EAOQ,E,YAFmC,IAAvC,IAAuC,qGAG3C,IAAI7E,EAAa,8B,GACT,EAAE,CAER,IAAe,EAAT,KAAY,0BAAZ,KAAY,KAChB,EAAO,CACP,QACA,YACA,cAAc,YAAe,SAAD,EAI5B,6E,GAIF,EAAI,EAAe,QACjB,EAAM,CACN,QAAyC,QAAa,QAEtD,EAAgB,MAAmB,GACnC,MAAa,YAAgB,G,KAE7B,EAAK,EAAY,QACF,C,IACX,EAAQ,IAAM,GACb,SAIH,EAAQ,IAAU,EAAC,KACjB,EAAI,uBACA,EAAY,OAAO,IACrB,UACD,qBAMH,EAAQ,IAAI,EAAE,CACb,cAIH,MAAa,EAAO,CACnB,oBAEJ,yBAGD,IAAID,EAAa,8BAYlB,OAXG,IACE,EAAO,EAAK,GAAW,SAAa,GAClC,SAAQ,WAAE,aAAW,CACrB,WACA,aAAO,EACP,QAAS,EACR,kBAKR,GAWC,EAAM,UAAY,mBAAuB,SAAQ,IAAoB,GAIrE,MAAU,EAAU,cAAS,yBAK7B,EAAM,YAAe,GAKrB,MACE7lB,KAAK,uBACH,eACA,EAAe,UACP,UAIR,SAAM,KAAa,sBAAmB,EAAc,oC,IAClD,EAAgB,YAAQ,UACtB,EAAG,gBAEH,GACF,gBAAO,GAAoB,cAAA6pB,cACzB,cAAI,SAAkB,SAAQ,GAC5B,UACD,4BAQL,IAAI,EAAS,YAAc,G,GACzB,GAAa,EAAI,cAAmB,C,GAElC,GAA8B,SAAxB,EAAU,YACd,kBAAa,SACX,mBAMNhL,EAAU,WAAW,EAAC,OACvB,c,IAGC,EAAM,CACN,OAAS,KACT,WACA,yBAUJ,OAPI,GAA2B,WAAd,gBACbsN,EAAI,SACL,2BAKH,KACE,SAAW,G,IACT,EAAE,YAAY,GACZ,EAGA,QAAa,cAAU,IAM7B,MALI,GAAU,EAAU,eACrB,eAIH,MAGJ,YA0FC,qCApFC,gBAEmB,IAAb,IAAuB,EAAe,IAAS,SACrD,IAAM,OAAY,UAAK,EAAa9nB,OAAO,SACrC,EAAYrE,KAAK,aAASqE,EAAS,aAEnC,EAAWrE,KAAK,YAEpB,EAKE,KALF,0BAOI,IAAa,YAAa,OAAc,oNAC5C,EAAK,oBACL,QACA,UAAW,EACX,YAAW,EACX,cACA,oBACA,4BAAO,EACN,YAMD,EAAW,SAAY4F,GAEvB,EAAM,UAAiB,EAMvB,IAGE,EAAW,EAAW,mBAAc,OAUtC,MAP2B,YAAzB,eACA,EAAU,UAEViZ,EAAUwJ,iBACX,wDAGD,GAKE,EAAgB,WAAa,SAAM,0BACrC,oBAAe,IAAG,eAElB,IACC,gD,IAcC,EAAO,MAAW,iBAChB,OAAgB,iBAIhB,gBAAc,qBAClB,uCAEF,EAAoB,EAAC,YAGtB,OADC,EAAO,QAAQ,UAChB,GAED,YA0JC,2B,IAzJC,OAOM,gBAA6B,EAI9B,uIAED,EAAS,QACX,GACE,0BAAoB,GAAW,SAAE,cAC/B,EAAE,OACF,KACC,sCAMP,IAAI,MAAa,IAiIlB,OAhIG,GACE,WAAM,MAAE,CA+BR,SASA,cAAgB,MAEhB,mBACE,eACE,SAAa,IAAY,G,IACzB,EAAK,EAAC
iB,mBAAwB,GAE5B,EAAI,wB,GACN,EAAI,C,GAIF,GAEA,EACE,OAAc,EAAG,SAEnB,IAAIlqB,EAAM,EAAW,OAetB,OAZc,IAAXA,IACD,gBAKY,IAAXA,GACD,WAKF,EAMoB,OAAnB,GACD,8CAOPgtB,EAAoB,MAAW,EAAI,SAAQ,cACzC,IAII,EAJA,IAAwE,yB,GAK1E,EAAW,C,IACT,EAAM,CACN,IAAI,EAAM,EAAG,UACb,EAAI,QACL,cAEF,WAIC,IAAsB,IAAH,IACpB,gBAGY,IAAXhtB,GACD,WAGC,EAAK,+BAA8B,GACpC,6BAYH,GACD,+BAGF,GAED,YAwJC,mC,IAtJG,EAAK,KAcD,UAAgB,EAAa,YAAc,yHAEjD,EAAe,gBACb,OAAU,CACV,SAAS,2BACT,YACC,kBAIH,IAAM,aAAmB,OACM,EAC7B,QAAgE,IAAhE,aAAAsN,QAEwB,IAAlB,IAAmB,gCAEzB,IAAI,EAAO,UACP,SACC,GACH,mBACD,aASD,IAAI,EAAQ,SAAK,GAAe,OAAE,OAAkB,YAAE,gF,OACpD,GAAO,EAAK,UAAW,sBACrB,EAAQ,WAAO,cACf,WACA,aAAO,SACP,QAAS,EACT,YACC,wBAAa,IACjB,8CAGD,MAIA,EAEkC,aAAd,EAAqB,EACvC,QAAuB,SAEI,UAA3B,EAA2B,EAEzB,EAIF,EAAS,4CACT,UAAO,EACP,UACA,YAAW,EAPe,YAS5B,KAIA,EAAO,GACc,kBAArB2f,GACA,OAEF,YAAqB,GACrB,UAAS,QACP,kB,OAEIhM,EAAK,KACP,SACE,CACA,oBAIF,GAAO,EACL,CACA,EAAiB,GACjB,KAIF,CACA,KAIF,wBAEA,IAAIA,E,OAAAA,EAAK,KACP,UAAO,KACL,CACA,EAAiB,GACjB,KAIF,CACA,KAIF,iBACE,OACA,sBAGF,mB,OACE,EACE,CACA,EAAiB,KACjB,KAKD,MACH,e,OACE,EAIE,CACA,EAAiB,aACjB,KAKD,MACH,cACD,WAID,EAAI,UAAY,SAAa,SAAW,GAIzC,OAHG,IAAY,KAAK,QAAO,IAAE,IAC3B,kCAEF,qBAEsB,oCAAY,QACjB,IAAV,IAAkB,EAAW,IACnC,sBACK,eACH,GAEH,8EACF,EA56CD,G,SExCIiM,GAAuB,EAkC3B,aAiDE,WAAYxpB,GAAZ,WAtCQ,KAAAypB,oBAAiD,GACjD,KAAAC,oBAAiD,GAuCrD,IAAApT,EAqBEtW,EArBC,IACH2V,EAoBE3V,EApBS,YACXoV,EAmBEpV,EAnBK,QACPmI,EAkBEnI,EAlBG,MACL,EAiBEA,EAjBa,QAAf6gB,OAAO,IAAG,GAAK,EACf,EAgBE7gB,EAhBoB,mBAAtB2pB,OAAkB,IAAG,IAAC,EACtB,EAeE3pB,EATO,kBANT4pB,OAAiB,MAIG,kBAAX5f,SACLA,OAAe6f,mBACjBzqB,QAAO,EACT,EAQEY,EARuB,mBAAzB4nB,OAAkB,IAAG,GAAI,EACzB3X,EAOEjQ,EAPY,eACd,EAMEA,EAN4B,uBAA9BohB,OAAsB,IAAG,GAAK,EAC9Ba,EAKEjiB,EALO,UACT8pB,EAIE9pB,EAJM,SACRkiB,EAGEliB,EAHa,gBACT+pB,EAEJ/pB,EAFuB,KAChBgqB,EACPhqB,EAD6B,QAG3BnE,EAASmE,EAAL,KAQV,GANKnE,IACHA,EAAOya,EACH,IAAI2T,EAAA,EAAS,CAAE3T,IAAG,EAAEX,YAAW,EAAEP,QAAO,IACxC0B,EAAA,EAAWoT,UAGZ/hB,EACH,MAAM,QAAI,QACR,8JAGA,WA8BJ,GA3BAjL,KAAKrB,KAAOA,EACZqB,KAAKiL,MAAQA,EACbjL,KAAKitB,sBAAwBtJ,GAAW8I,EAAqB,EAC7DzsB,KAAK0qB,mBAAqBA,EAC1B1qB,KAAK+S,eAAiBA,GAAkBxS,OAAOmB,OAAO,MACtD1B,KAAK4sB,SAAWA,EAEZH,GACFvO,YACE,WAAM,OAAC,EAAK+O,uBAAN,IACNR,GAIJzsB,KAAKktB,WAAaltB,KAAKktB,WAAW1tB,KAAKQ,MACvCA,KAAKqE,MAAQrE,KAAKqE,MAAM7E,KAAKQ,MAC7BA,KAAK6D,OAAS7D,KAAK6D,OAAOrE,KAAKQ,MAC/BA,KAAKmtB,WAAantB,KAAKmtB,WAAW3tB,KAAKQ,MACvCA,KAAKotB,yBAA2BptB,KAAKotB,yBAAyB5tB,KAAKQ,MAE/D0sB,GAAuC,kBAAX5f,SAC7BA,OAAe6f,kBAAoB3sB,OAMjCssB,GAAwBpqB,UAC3BoqB,GAAuB,EAEH,qBAAXxf,QACPA,OAAOpC,UACPoC,OAAOugB,MAAQvgB,OAAOC,OACpBD,OAAewgB,iCACjB,CACA,IAAMC,EAAMzgB,OAAO0gB,UACbC,EAAKF,GAAOA,EAAIG,UAClBC,OAAG,EACW,kBAAPF,IACLA,EAAG7b,QAAQ,YAAc,EAC3B+b,EAAM,uGAEGF,EAAG7b,QAAQ,aAAe,IACnC+b,EAAM,2EAGNA,GACF,SAAU,IACR,yEACmBA,GAM3B3tB,KAAKka,QC9Nc,QDgOnBla,KAAK4qB,WAAa,IAAI,EAAW,CAC/B3f,MAAK,EACL/K,OAAQF,KACR+kB,UAAS,EACTC,gBAAe,IAGjBhlB,KAAK4e,aAAe,IAAI,EAAa,CACnC3T,MAAOjL,KAAKiL,MACZtM,KAAMqB,KAAKrB,KACXoU,eAAgB/S,KAAK+S,eACrB2X,mBAAkB,EAClB/G,QAAO,EACP1J,gBAAiB,CACfzX,KAAMqqB,EACN3S,QAAS4S,GAEXlC,WAAY5qB,KAAK4qB,WACjB1G,uBAAsB,EACtByG,YAAa+B,EAAoB,WAC3B,EAAKkB,gBACP,EAAKA,eAAe,CAClBC,OAAQ,GACRC,MAAO,CACLjD,QAAS,EAAKjM,aAAamP,gBAC3BC,UAAW,EAAKpP,aAAawM,eAAiB,IAEhD6C,0BAA2B,EAAKhjB,MAAMijB,SAAQ,WAGhD,IAyXV,OAjXS,YAAA/E,KAAP,WACEnpB,KAAK4e,aAAauK,QAsBb,YAAA+D,WAAP,SACEpqB,GAeA,OAbI9C,KAAK+S,eAAema,aACtBpqB,EAAU,OAAAqrB,EAAA,GAAanuB,KAAK+S,eAAema,WAAYpqB,KAKvD9C,KAAKitB,uBACoB,iBAAxBnqB,EAAQsC,aACiB,sBAAxBtC,EAAQsC,cAEVtC,EAAU,OAAH,IAAG,CAAH,eAAQA,GAAO,CAAEsC,YAAa,iBAGhCpF,KAAK4e,aAAasO,WAA0BpqB,IAY9C,YAAAuB,MAAP,SACEvB,GAkBA,OAhBI9C,KAAK+S,eAAe1O,QACtBvB,EAAU,
OAAAqrB,EAAA,GAAanuB,KAAK+S,eAAe1O,MAAOvB,IAGpD,QACG,YAAkD,sBAA1C,EAAAsC,YACT,qSAIA,oDAEEpF,KAAKitB,uBAAiD,iBAAxBnqB,EAAQsC,cACxCtC,EAAU,OAAH,IAAG,CAAH,eAAQA,GAAO,CAAEsC,YAAa,iBAGhCpF,KAAK4e,aAAava,MAAqBvB,IAUzC,YAAAe,OAAP,SAMEf,GAKA,OAHI9C,KAAK+S,eAAelP,SACtBf,EAAU,OAAAqrB,EAAA,GAAanuB,KAAK+S,eAAelP,OAAQf,IAE9C9C,KAAK4e,aAAa/a,OAA4Cf,IAOhE,YAAA5D,UAAP,SACE4D,GAEA,OAAO9C,KAAK4e,aAAakE,yBAA4BhgB,IAYhD,YAAAsrB,UAAP,SACEtrB,EACAyf,GAEA,YAFA,IAAAA,OAAA,GAEOviB,KAAKiL,MAAMmjB,UAAyBtrB,EAASyf,IAiB/C,YAAA8L,aAAP,SACEvrB,EACAyf,GAEA,YAFA,IAAAA,OAAA,GAEOviB,KAAKiL,MAAMojB,aAA4BvrB,EAASyf,IAQlD,YAAAC,WAAP,SACE1f,GAEA9C,KAAKiL,MAAMuX,WAA8B1f,GACzC9C,KAAK4e,aAAasE,oBAcb,YAAAoL,cAAP,SACExrB,GAEA9C,KAAKiL,MAAMqjB,cAAiCxrB,GAC5C9C,KAAK4e,aAAasE,oBAGb,YAAAqL,wBAAP,SAA+BC,GAC7BxuB,KAAK4tB,eAAiBY,GAGjB,YAAAC,aAAP,SAAoBC,GAClB,OAAO,EAAQ1uB,KAAKrB,KAAM+vB,IAmBrB,YAAAvB,WAAP,sBACE,OAAO7X,QAAQC,UACZzR,MAAK,WAAM,SAAK8a,aAAa+P,WAAW,CACvCC,gBAAgB,OAEjB9qB,MAAK,WAAM,OAAAwR,QAAQ8R,IAAI,EAAKmF,oBAAoB/b,KAAI,SAAAqe,GAAM,OAAAA,WAC1D/qB,MAAK,WAAM,wCAOT,YAAA6qB,WAAP,sBACE,OAAOrZ,QAAQC,UACZzR,MAAK,WAAM,SAAK8a,aAAa+P,WAAW,CACvCC,gBAAgB,OAEjB9qB,MAAK,WAAM,OAAAwR,QAAQ8R,IAAI,EAAKoF,oBAAoBhc,KAAI,SAAAqe,GAAM,OAAAA,YAQxD,YAAAC,aAAP,SAAoBN,GAApB,WAEE,OADAxuB,KAAKusB,oBAAoB/jB,KAAKgmB,GACvB,WACL,EAAKjC,oBAAsB,EAAKA,oBAAoBljB,QAAO,SAAA0lB,GAAK,OAAAA,IAAA,OAS7D,YAAAC,aAAP,SAAoBR,GAApB,WAEE,OADAxuB,KAAKwsB,oBAAoBhkB,KAAKgmB,GACvB,WACL,EAAKhC,oBAAsB,EAAKA,oBAAoBnjB,QAAO,SAAA0lB,GAAK,OAAAA,IAAA,OAgB7D,YAAA3B,yBAAP,SACE6B,GAEA,OAAOjvB,KAAK4e,aAAawO,yBAAyB6B,IAc7C,YAAAvD,eAAP,SAIE5oB,GAEA,IAAM0N,EAAMxQ,KAAK4e,aAAa8M,eAAe5oB,GACvC+nB,EAAkC,GAClCqE,EAAmD,GAEzD1e,EAAIjP,SAAQ,SAACnC,EAAQ0lB,GACnB+F,EAAQriB,KAAKsc,GACboK,EAAQ1mB,KAAKpJ,MAGf,IAAMA,EAASkW,QAAQ8R,IACrB8H,GAeF,OAVA9vB,EAAOyrB,QAAUA,EACjBzrB,EAAO8vB,QAAUA,EAKjB9vB,EAAO6E,OAAM,SAAAxE,GACX,SAAU,IAAM,2EAAqE,OAAS,OAGzFL,GAWF,YAAA+vB,qBAAP,SACEC,GAEA,YAFA,IAAAA,MAAA,UAEOpvB,KAAK4e,aAAauQ,qBAAqBC,IAMzC,YAAAlB,QAAP,SAAe3L,GACb,OAAOviB,KAAKiL,MAAMijB,QAAQ3L,IAUrB,YAAA8M,QAAP,SAAeC,GACb,OAAOtvB,KAAKiL,MAAMokB,QAAQC,IAMrB,YAAArK,aAAP,SAAoBF,GAClB/kB,KAAK4qB,WAAW3F,aAAaF,IAMxB,YAAAM,aAAP,SAAoBN,GAClB/kB,KAAK4qB,WAAWvF,aAAaN,IAMxB,YAAAO,aAAP,WACE,OAAOtlB,KAAK4qB,WAAWtF,gBAMlB,YAAAiK,6BAAP,SAAoCvK,GAClChlB,KAAK4qB,WAAW1F,mBAAmBF,IAM9B,YAAAwK,QAAP,SAAeC,GACbzvB,KAAKrB,KAAOqB,KAAK4e,aAAajgB,KAAO8wB,GAEzC,EA1iBA,I,+GEhEA,0BAiIU,KAAAC,eAAiB,YAAK,KAoEhC,OAhJS,YAAAtN,MAAP,SAAgBtf,GAAhB,IAIM6sB,EAJN,OACQC,EAC0B,kBAAvB9sB,EAAQyf,WAA0Bzf,EAAQyf,YAC1B,IAAvBzf,EAAQyf,WAAuB,UAAO,EAMxC,OAJAviB,KAAKmqB,oBACH,WAAM,OAAAwF,EAAe7sB,EAAQuf,OAAvB,KACNuN,GAEKD,GAeF,YAAAE,4BAAP,SACEC,EACAF,GAEA5vB,KAAKmqB,mBAAmB2F,EAAaF,IAOhC,YAAAG,kBAAP,SAAyBrlB,GACvB,OAAOA,GAKF,YAAAslB,iBAAP,SAAwBtlB,GACtB,OAAOA,GAGF,YAAAub,SAAP,SAAgBgK,KAIT,YAAAC,GAAP,WACE,MAAO,IAGF,YAAAC,OAAP,SAAcrtB,GACZ,OAAO,GASF,YAAAsrB,UAAP,SACEtrB,EACAyf,GAEA,YAFA,IAAAA,QAAezf,EAAQyf,YAEhBviB,KAAK6V,KAAI,2BACX/S,GAAO,CACVstB,OAAQttB,EAAQ4M,IAAM,aACtB6S,WAAU,MAQP,YAAA8L,aAAP,SACEvrB,EACAyf,GAEA,YAFA,IAAAA,QAAezf,EAAQyf,YAEhBviB,KAAK6V,KAAI,2BACX/S,GAAO,CACVuB,MAAOrE,KAAK0vB,eAAe5sB,EAAQ2K,SAAU3K,EAAQoK,cACrDkjB,OAAQttB,EAAQ4M,GAChB6S,WAAU,MAIP,YAAAC,WAAP,SAAiD,GAC/C,IAAA9S,EAAE,KACF/L,EAAI,OACDb,EAAO,cAHqC,eAK/C,OAAO9C,KAAKqwB,MAAM9vB,OAAOC,OAAOsC,EAAS,CACvCwtB,OAAQ5gB,GAAM,aACdtQ,OAAQuE,MAIL,YAAA2qB,cAAP,SAAoD,GAClD,IAAA5e,EAAE,KACF/L,EAAI,OACJ8J,EAAQ,WACRP,EAAY,eACTpK,EAAO,cALwC,yCAOlD,OAAO9C,KAAKqwB,MAAM9vB,OAAOC,OAAOsC,EAAS,CACvCuB,MAAOrE,KAAK0vB,eAAejiB,EAAUP,GACrCojB,OAAQ5gB,EACRtQ,OAAQuE,MAIL,YAAA2e,YAAP,SACExf,EACA,GAEA,OAAO9C,KAAKoiB,MAAM,CAChBC,O
AAA,SAAOpX,GACL,IAAMvK,EAAQuK,EAAMmjB,UAA6BtrB,GAC3Ca,EAAO,EAAOjD,GACpB,YAAa,IAATiD,GAA4B,OAATA,EAAsBjD,GAC7CuK,EAAMuX,WAAU,2BAAyB1f,GAAO,CAAEa,KAAI,KAC/CA,OAKN,YAAA4sB,eAAP,SACEztB,EACA,GAEA,OAAO9C,KAAKoiB,MAAM,CAChBC,OAAA,SAAOpX,GACL,IAAMvK,EAAQuK,EAAMojB,aAAgCvrB,GAC9Ca,EAAO,EAAOjD,GACpB,YAAa,IAATiD,GAA4B,OAATA,EAAsBjD,GAC7CuK,EAAMqjB,cAAa,2BAAyBxrB,GAAO,CAAEa,KAAI,KAClDA,OAIf,EArMA,GCWA,cACE,WACkBuL,EACA4a,EACAzlB,EACAuB,G,MAJlB,EAOE,YAAMsJ,IAAQ,KAEd,GARgB,EAAAA,UACA,EAAA4a,OACA,EAAAzlB,QACA,EAAAuB,YAKZoG,MAAMY,QAAQ,EAAKkd,MAAO,CAC5B,EAAKnJ,QAAU,EAAKzR,QACpB,IAAK,IAAIzN,EAAI,EAAKqoB,KAAK/lB,OAAS,EAAGtC,GAAK,IAAKA,EAC3C,EAAKkf,UAAO,MAAM,EAAKmJ,KAAKroB,IAAK,EAAKkf,QAAO,QAG/C,EAAKA,QAAU,EAAKmJ,K,OAKrB,EAAaxa,UAAYkhB,EAAkB5vB,U,EAIhD,OAzBuC,iBAyBvC,EAzBA,CAAuC4O,O,uGCKjCihB,EAAclwB,OAAOmB,OAAO,MAC5BgvB,EAA6B,WAAM,OAAAD,GACnCE,EAAkBpwB,OAAOmB,OAAO,MAEtC,aAGE,WACkBkvB,EACAC,GAFlB,WACkB,KAAAD,WACA,KAAAC,QAJR,KAAAltB,KAA8BpD,OAAOmB,OAAO,MAqU9C,KAAAovB,QAEJvwB,OAAOmB,OAAO,MA0DV,KAAAqvB,KAEJxwB,OAAOmB,OAAO,MA6CX,KAAAsvB,cAAgB,SACrBC,EACAC,GACG,cAAAxuB,EAAA,GACH,YAAYuuB,GACR,EAAK7vB,IAAI6vB,EAAkBthB,MAAOuhB,GAClCD,GAAqBA,EAHtB,KASE,KAAAE,QAA2B,SAAAC,GAChC,OAAO,YAAYA,GACf,EAAKjwB,IAAIiwB,EAASzhB,OACE,kBAAbyhB,GAON,KAAAC,YAAmC,SACxCC,EACAC,GAEA,GAA4B,kBAAjBD,EACT,OAAO,YAAcA,GAGvB,GAAI,YAAYA,GACd,OAAOA,EAGF,IAAA5hB,EAAM,EAAKkhB,SAAS3K,SAASqL,GAA3B,GAET,GAAI5hB,EAAI,CACN,IAAMvM,EAAM,YAAcuM,GAI1B,OAHI6hB,GACF,EAAKvH,MAAMta,EAAI4hB,GAEVnuB,IAGb,OA1cS,YAAAquB,SAAP,WACE,sBAAYxxB,KAAK2D,OAGZ,YAAAxC,IAAP,SAAWmvB,GACT,YAAqC,IAA9BtwB,KAAKyxB,OAAOnB,GAAQ,IAGtB,YAAAlvB,IAAP,SAAWkvB,EAAgBjf,GAEzB,GADArR,KAAK6wB,MAAMa,OAAOpB,EAAQjf,GACtB,IAAOpQ,KAAKjB,KAAK2D,KAAM2sB,GAAS,CAClC,IAAMqB,EAAc3xB,KAAK2D,KAAK2sB,GAC9B,GAAIqB,GAAe,IAAO1wB,KAAK0wB,EAAatgB,GAC1C,OAAOsgB,EAAYtgB,GAGvB,MAAkB,eAAdA,GACA,IAAOpQ,KAAKjB,KAAK4wB,SAASgB,kBAAmBtB,GACxCtwB,KAAK4wB,SAASgB,kBAAkBtB,GAErCtwB,gBAAgB,EACXA,KAAKqI,OAAOjH,IAAIkvB,EAAQjf,QADjC,GAKQ,YAAAogB,OAAV,SAAiBnB,EAAgBuB,GAQ/B,OAFIA,GAAmB7xB,KAAK6wB,MAAMa,OAAOpB,EAAQ,YAE7C,IAAOrvB,KAAKjB,KAAK2D,KAAM2sB,GAClBtwB,KAAK2D,KAAK2sB,GAGftwB,gBAAgB,EACXA,KAAKqI,OAAOopB,OAAOnB,EAAQuB,GAGhC7xB,KAAK4wB,SAASgB,kBAAkBtB,GAC3B/vB,OAAOmB,OAAO,WADvB,GAKK,YAAAsoB,MAAP,SACE8H,EACAC,GAFF,IAIMzB,EAJN,OAOM,YAAYwB,KAAQA,EAAQA,EAAMniB,OAClC,YAAYoiB,KAAQA,EAAQA,EAAMpiB,OAEtC,IAAMqiB,EACa,kBAAVF,EACH9xB,KAAKyxB,OAAOnB,EAASwB,GACrBA,EAEAG,EACa,kBAAVF,EACH/xB,KAAKyxB,OAAOnB,EAASyB,GACrBA,EAIN,GAAKE,EAAL,CAEA,QACE,YACA,kBADa,EACb,mCACA,mCAEF,IAAMztB,EACJ,IAAI,IAAW0tB,GAAuBlI,MAAMgI,EAAUC,GAMxD,GAFAjyB,KAAK2D,KAAK2sB,GAAU9rB,EAEhBA,IAAWwtB,WACNhyB,KAAK+wB,KAAKT,GACbtwB,KAAK6wB,MAAMsB,SAAS,CACtB,IAAM,EAAmC5xB,OAAOmB,OAAO,MAKlDswB,IAAU,EAAcI,SAAW,GAIxC7xB,OAAOqB,KAAKqwB,GAAU1wB,SAAQ,SAAA2vB,GAC5B,IAAKc,GAAYA,EAASd,KAAoB1sB,EAAO0sB,GAAiB,CAGpE,EAAcA,GAAkB,EAShC,IAAM7f,EAAY,YAAuB6f,GACrC7f,IAAc6f,GACb,EAAKN,SAASyB,WAAW7tB,EAAOgO,WAAYnB,KAC/C,EAAcA,GAAa,QAME,IAA3B7M,EAAO0sB,IAAgC,aAAgB,UAClD1sB,EAAO0sB,QAKhB,EAAc1e,YACZwf,GAAYA,EAASxf,YAKvBxS,KAAK4wB,SAASgB,kBAAkBtB,KAAY9rB,EAAOgO,mBAC9C,EAAcA,WAGvBjS,OAAOqB,KAAK,GAAeL,SACzB,SAAA8P,GAAa,SAAKwf,MAAMhlB,MAAMykB,EAAjB,SAKd,YAAAH,OAAP,SACEG,EACA/f,GAFF,WAIQohB,EAAc3xB,KAAKyxB,OAAOnB,GAEhC,GAAIqB,EAAa,CACf,IAAM,EAAqCpxB,OAAOmB,OAAO,MACrD,GAAc,EACd,GAAa,EAEX,EAAgB,CACpB+uB,OAAM,EACNE,WAAU,EACV9gB,YAAW,IACXwhB,YAAarxB,KAAKqxB,YAClBF,QAASnxB,KAAKmxB,QACdmB,UAAW,SACTC,EACA5vB,GACG,SAAKiuB,SAAS0B,UACa,kBAAvBC,EAAkC,CACvClhB,UAAWkhB,EACX5vB,KAAMA,GAAQ,YAAc2tB,IAC1BiC,EACJ,CAAEC,MALC,MAwCP,GA/BAjyB,OAAOqB,KAAK+vB,GAAapwB,SAAQ,SAAA2vB,GAC/B,IAAM7f,EAAY,YAAuB6f,GACrCuB,EAAad,EAAYT
,GAC7B,QAAmB,IAAfuB,EAAJ,CACA,IAAMtC,EAAiD,oBAAX5f,EACxCA,EACAA,EAAO2gB,IAAmB3gB,EAAOc,GACrC,GAAI8e,EAAQ,CACV,IAAIvkB,EAAWukB,IAAWO,EAAcD,EACtCN,EAAO,OAAAztB,EAAA,GAAgB+vB,GAAW,2BAC7B,GAAa,CAChBphB,UAAS,EACT6f,eAAc,EACdwB,QAAS,EAAKC,WAAWrC,EAAQY,MAEjCtlB,IAAa+kB,EACf,EAAKE,MAAMhlB,MAAMykB,EAAQY,IAErBtlB,IAAa6kB,IAAQ7kB,OAAW,GAChCA,IAAa6mB,IACf,EAAcvB,GAAkBtlB,EAChC,GAAc,EACd6mB,EAAa7mB,SAIA,IAAf6mB,IACF,GAAa,OAIb,EAYF,OAXAzyB,KAAKgqB,MAAMsG,EAAQ,GAEf,IACEtwB,gBAAgB,EAClBA,KAAK2D,KAAK2sB,QAAU,SAEbtwB,KAAK2D,KAAK2sB,GAEnBtwB,KAAK6wB,MAAMhlB,MAAMykB,EAAQ,cAGpB,EAIX,OAAO,GASF,YAAAhkB,OAAP,SACEgkB,EACAjf,EACAC,G,MAEMqgB,EAAc3xB,KAAKyxB,OAAOnB,GAChC,GAAIqB,EAAa,CACf,IAAMjf,EAAW1S,KAAKgxB,cAAsBW,EAAa,cACnDT,EAAiB7f,GAAaC,EAChCtR,KAAK4wB,SAASgC,kBAAkB,CAAElgB,SAAQ,EAAErB,UAAS,EAAEC,KAAI,IAC3DD,EACJ,OAAOrR,KAAKmwB,OAAOG,EAAQY,IAAgB,MACxCA,GAAiBR,EAClB,GAAEA,GAEN,OAAO,GAGF,YAAAmC,MAAP,SACE/vB,EACAgwB,GAEA,IAAIC,GAAU,EAgBd,OAfIjwB,EAAQ4M,KACN,IAAOzO,KAAKjB,KAAK2D,KAAMb,EAAQ4M,MACjCqjB,EAAU/yB,KAAKsM,OAAOxJ,EAAQ4M,GAAI5M,EAAQuO,UAAWvO,EAAQwO,OAE3DtR,gBAAgB,GAASA,OAAS8yB,IACpCC,EAAU/yB,KAAKqI,OAAOwqB,MAAM/vB,EAASgwB,IAAUC,IAM7CjwB,EAAQuO,WAAa0hB,IACvB/yB,KAAK6wB,MAAMhlB,MAAM/I,EAAQ4M,GAAI5M,EAAQuO,WAAa,aAG/C0hB,GAGF,YAAA9mB,MAAP,WACEjM,KAAKoP,QAAQ,OAGR,YAAA8e,QAAP,sBACQjsB,EAAMjC,KAAKwxB,WACXwB,EAAyB,GAS/B,OARAhzB,KAAKizB,eAAe1xB,SAAQ,SAAAmO,GACrB,IAAOzO,KAAK,EAAK2vB,SAASgB,kBAAmBliB,IAChDsjB,EAAaxqB,KAAKkH,MAGlBsjB,EAAajvB,SACf9B,EAAIixB,OAAS,CAAEF,aAAcA,EAAaxhB,SAErCvP,GAGF,YAAAmN,QAAP,SAAe+jB,GAAf,WAME,GALA5yB,OAAOqB,KAAK5B,KAAK2D,MAAMpC,SAAQ,SAAA+uB,GACvB6C,GAAW,IAAOlyB,KAAKkyB,EAAS7C,IACpC,EAAKhkB,OAAOgkB,MAGZ6C,EAAS,CACH,IAAAD,EAAoBC,EAAd,OAAK,EAAI,YAAKA,EAAtB,YACN5yB,OAAOqB,KAAK,GAAML,SAAQ,SAAA+uB,GACxB,EAAKtG,MAAMsG,EAAQ,EAAKA,OAEtB4C,GACFA,EAAOF,aAAazxB,QAAQvB,KAAKozB,OAAQpzB,QAiBxC,YAAAozB,OAAP,SAAchD,GACZ,OAAOpwB,KAAK8wB,QAAQV,IAAWpwB,KAAK8wB,QAAQV,IAAW,GAAK,GAGvD,YAAAiD,QAAP,SAAejD,GACb,GAAIpwB,KAAK8wB,QAAQV,GAAU,EAAG,CAC5B,IAAMlc,IAAUlU,KAAK8wB,QAAQV,GAE7B,OADKlc,UAAclU,KAAK8wB,QAAQV,GACzBlc,EAET,OAAO,GAKF,YAAA+e,aAAP,SAAoBK,GAUlB,YAVkB,IAAAA,MAAA,IAAUtxB,KAC5BzB,OAAOqB,KAAK5B,KAAK8wB,SAASvvB,QAAQ+xB,EAAI7wB,IAAK6wB,GACvCtzB,gBAAgB,EAClBA,KAAKqI,OAAO4qB,aAAaK,GAKzB/yB,OAAOqB,KAAK5B,KAAK4wB,SAASgB,mBAAmBrwB,QAAQ+xB,EAAI7wB,IAAK6wB,GAEzDA,GAOF,YAAApD,GAAP,sBACQoD,EAAMtzB,KAAKizB,eACXM,EAAWvzB,KAAKwxB,WACtB8B,EAAI/xB,SAAQ,SAAAmO,GACN,IAAOzO,KAAKsyB,EAAU7jB,KAIxBnP,OAAOqB,KAAK,EAAK4xB,gBAAgB9jB,IAAKnO,QAAQ+xB,EAAI7wB,IAAK6wB,UAGhDC,EAAS7jB,OAGpB,IAAM+jB,EAAclzB,OAAOqB,KAAK2xB,GAChC,GAAIE,EAAY1vB,OAAQ,CAEtB,IADA,IAAI,EAAoB/D,KACjB,aAAgB,GAAO,EAAO,EAAKqI,OAC1CorB,EAAYlyB,SAAQ,SAAAmO,GAAM,SAAKpD,OAAL,MAE5B,OAAOmnB,GAQF,YAAAD,gBAAP,SAAuBlD,GACrB,IAAK,IAAOrvB,KAAKjB,KAAK+wB,KAAMT,GAAS,CACnC,IAAM,EAAQtwB,KAAK+wB,KAAKT,GAAU/vB,OAAOmB,OAAO,MAC1CgyB,EAAO1zB,KAAK2D,KAAK2sB,GACvB,IAAKoD,EAAM,OAAO,EAElB,IAAM,EAAU,IAAI1xB,IAAkC,CAAC0xB,IAGvD,EAAQnyB,SAAQ,SAAAU,GACV,YAAYA,KACd,EAAMA,EAAI0N,QAAS,GASjB,YAAgB1N,IAClB1B,OAAOqB,KAAKK,GAAKV,SAAQ,SAAAM,GACvB,IAAML,EAAQS,EAAIJ,GAGd,YAAgBL,IAClB,EAAQiB,IAAIjB,SAMtB,OAAOxB,KAAK+wB,KAAKT,IAKZ,YAAAqD,aAAP,WACE,OAAO3zB,KAAK6wB,MAAM+C,SAASC,YAAY1uB,YAiD3C,EA7dA,GA8eA,aAOE,WACkBgtB,EACR9pB,QAAA,IAAAA,MAAA,MADQ,KAAA8pB,UACR,KAAA9pB,SARF,KAAA8B,EAAiD,KAUvDnK,KAAK8zB,eAyCT,OAtCS,YAAAA,aAAP,WACE9zB,KAAKmK,EAAInK,KAAKmyB,QAAU,cAAgB,KACxCnyB,KAAK4zB,SAAW,IAAI,IAAKne,EAAA,IAGpB,YAAAic,OAAP,SAAcpB,EAAgBY,GAC5B,GAAIlxB,KAAKmK,EAAG,CACVnK,KAAKmK,EAAE4pB,EAAWzD,EAAQY,IAC1B,IAAM7f,EAAY,YAAuB6f,GACrC7f,IAAc6f,GAMhBlxB,KAAKmK,EAAE4pB,EAAWzD,EAAQjf,IAExBrR,KAAKqI,QACPrI,KAAKqI,OAAOqpB,OA
AOpB,EAAQY,KAK1B,YAAArlB,MAAP,SAAaykB,EAAgBY,GACvBlxB,KAAKmK,GACPnK,KAAKmK,EAAE0B,MACLkoB,EAAWzD,EAAQY,GAQA,aAAnBA,EAAgC,SAAW,aAInD,EApDA,GAsDA,SAAS6C,EAAWzD,EAAgBY,GAIlC,OAAOA,EAAiB,IAAMZ,EAG1B,SAAU0D,EACdxB,EACAyB,GAEIC,EAAsB1B,IASxBA,EAAM3B,MAAMa,OAAOuC,EAAU,aAIjC,SAAiBE,GAEf,kBACE,WAAY,G,IACVvD,EAAQ,WACR,IAAAwD,qBAAa,IAAG,GAAI,EACpBC,EAAI,OAHN,EASE,YAAMzD,EAAU,IAAI,EAAWwD,KAAe,K,OAIhC,EAAAE,MAAQ,IAAI,EAAM,GAiBlB,EAAAC,YAAc,IAAI,IAAkB9e,EAAA,GApB9C4e,GAAM,EAAKjlB,QAAQilB,G,EAwB3B,OAnC0B,iBAgBjB,YAAAG,SAAP,SACEC,EACAC,GAKA,OAAO10B,KAAKs0B,MAAME,SAASC,EAASC,IAG/B,YAAAC,YAAP,WAEE,OAAO30B,MAIF,YAAA2yB,WAAP,WACE,OAAO3yB,KAAKu0B,YAAYV,YAAY1uB,YAExC,EAnCA,CAA0BgvB,GAAb,EAAAS,KAAI,EAFnB,CAAiB,MAAW,KA0C5B,kBACE,WACkBllB,EACArH,EACAqsB,EACA7D,GAJlB,MAME,YAAMxoB,EAAOuoB,SAAUC,IAAM,K,OALb,EAAAnhB,KACA,EAAArH,SACA,EAAAqsB,SACA,EAAA7D,QAGhB6D,EAAO,G,EAmFX,OA3FoB,iBAWX,YAAAF,SAAP,SACEC,EACAC,GAEA,OAAO,IAAIG,EAAMJ,EAASz0B,KAAM00B,EAAQ10B,KAAK6wB,QAGxC,YAAA8D,YAAP,SAAmBF,GAAnB,WAEQpsB,EAASrI,KAAKqI,OAAOssB,YAAYF,GAEvC,OAAIA,IAAYz0B,KAAK0P,IACf1P,KAAK6wB,MAAMsB,SAKb5xB,OAAOqB,KAAK5B,KAAK2D,MAAMpC,SAAQ,SAAA+uB,GAC7B,IAAMwE,EAAiB,EAAKnxB,KAAK2sB,GAC3ByE,EAAoB1sB,EAAM,OAAWioB,GACtCyE,EAOOD,EASDA,IAAmBC,GAI5Bx0B,OAAOqB,KAAKkzB,GAAgBvzB,SAAQ,SAAA2vB,GAC7B,YAAM4D,EAAe5D,GACf6D,EAAkB7D,KAC3B,EAAKL,MAAMhlB,MAAMykB,EAAQY,OAX7B,EAAKL,MAAMhlB,MAAMykB,EAAQ,YACzB/vB,OAAOqB,KAAKmzB,GAAmBxzB,SAAQ,SAAA2vB,GACrC,EAAKL,MAAMhlB,MAAMykB,EAAQY,OAR3B,EAAK5kB,OAAOgkB,MAwBXjoB,GAILA,IAAWrI,KAAKqI,OAAerI,KAG5BqI,EAAOmsB,SAASx0B,KAAK0P,GAAI1P,KAAK00B,SAGhC,YAAAlD,SAAP,WACE,kCACKxxB,KAAKqI,OAAOmpB,YACZxxB,KAAK2D,OAIL,YAAA6vB,gBAAP,SAAuBlD,GACrB,IAAM0E,EAAah1B,KAAKqI,OAAOmrB,gBAAgBlD,GAC/C,OAAO,IAAOrvB,KAAKjB,KAAK2D,KAAM2sB,GAAS,2BAClC0E,GACA,YAAMxB,gBAAe,UAAClD,IACvB0E,GAGC,YAAArC,WAAP,WAEE,IADA,IAAIte,EAAiBrU,KAAKqI,OAClBgM,EAAYhM,QAAQgM,EAAKA,EAAYhM,OAC7C,OAAOgM,EAAEse,WAAWztB,MAAMmP,EAAGlP,YAEjC,EA3FA,CAAoB,GAiGpB,cACE,WAAYuuB,G,OACV,YACE,oBACAA,GACA,cACA,IAAI,EAAWA,EAAK7C,MAAMsB,QAASuB,EAAK7C,SACzC,KAgBL,OAvBoB,iBAUX,YAAA8D,YAAP,WAEE,OAAO30B,MAGF,YAAAgqB,MAAP,WAME,OAAOhqB,KAAKqI,OAAO2hB,MAAM9kB,MAAMlF,KAAKqI,OAAQlD,YAEhD,EAvBA,CAAoB,GAyBpB,SAAS+sB,EACP+C,EACAC,EACAC,GAEA,IAAMC,EAAgBH,EAAeE,GAC/BE,EAAgBH,EAAeC,GAMrC,OAAO,YAAMC,EAAeC,GAAiBD,EAAgBC,EAGzD,SAAUnB,EAAsB1B,GAEpC,SAAUA,aAAiB,GAAeA,EAAM3B,MAAMsB,S,YCtrBxD,SAASmD,EACPxyB,GAEA,MAAO,CACLA,EAAQkE,aACRlE,EAAQmuB,kBACRnuB,EAAQxC,QAGRwC,EAAQxC,QAAQsoB,iBAIpB,iBA+BE,WAAY9f,GAAZ,WATQ,KAAAysB,aAAe,IACrB9f,EAAA,EAAgB1K,QAAU7J,KAS1BlB,KAAK8I,OAAS,OAAA6E,EAAA,GAAQ7E,EAAQ,CAC5B0sB,aAAoC,IAAvB1sB,EAAO0sB,YACpB5M,gBAAiB,YAAsB9f,KAGzC9I,KAAKy1B,MAAQ3sB,EAAO2sB,OAAS,IAAI,IAEjCz1B,KAAK01B,oBAAsB,aAAK,SAAA5yB,G,MACtB8lB,EAAoB9lB,EAAQxC,QAAb,gBAEjBq1B,EAAWL,EAAwBxyB,GAIzC6yB,EAAS,IAAM/M,EAEf,IAAMgN,GAAQ,IAAKF,qBAAoBG,KAAI,QAAIF,GAE/C,OAAIC,EACEhN,EACF,2BACKgN,GAAK,CAGRx2B,OAAQ,EAAKq2B,MAAMK,MAAMF,EAAMx2B,UAK5Bw2B,GAGT5B,EACElxB,EAAQxC,QAAQkyB,MAChB1vB,EAAQizB,aAAapmB,OAKhB,EAAKqmB,qBAAqBlzB,MAEhC,CACDmzB,IAAKj2B,KAAK8I,OAAOotB,mBACjBC,QAASb,EAGT3B,aAAY,SAAC3sB,EAAcqB,EAAQ/H,EAASsoB,GAC1C,GAAIsL,EAAsB5zB,EAAQkyB,OAChC,OAAOlyB,EAAQkyB,MAAMmB,aACnB3sB,EACA,YAAYqB,GAAUA,EAAOsH,MAAQtH,EACrC/H,EAAQ81B,UACRxN,MAMR5oB,KAAKq2B,wBAA0B,aAAK,SAACvzB,GAKnC,OAJAkxB,EACElxB,EAAQxC,QAAQkyB,MAChB1vB,EAAQizB,aAAapmB,OAEhB,EAAK2mB,yBAAyBxzB,KACpC,CACDmzB,IAAKj2B,KAAK8I,OAAOotB,mBACjBvC,aAAY,SAAC,G,IAAEzpB,EAAK,QAAE3D,EAAK,QAAEjG,EAAO,UAClC,GAAI4zB,EAAsB5zB,EAAQkyB,OAChC,OAAOlyB,EAAQkyB,MAAMmB,aACnBzpB,EACA3D,EACAjG,EAAQ81B,cA+RpB,OA3WS,YAAAG,WAAP,WACEv2B,KAAKy1B,MAAQ,IAAI,KAyFZ,YAAAe,sBAAP,SAAgC,G,IAC9BhE,EAA
K,QACLnuB,EAAK,QACL,IAAA+rB,cAAM,IAAG,eAAY,EACrBxqB,EAAS,YACT,IAAA2a,yBAAiB,IAAG,GAAI,EACxB,IAAAqI,uBAAe,IAAG,EAAA5oB,KAAK8I,OAAO8f,gBAAe,EAEvCgI,EAAW5wB,KAAK8I,OAAOmC,MAAM2lB,SAEnChrB,EAAY,OAAH,IAAG,CAAH,eACJ,YAAiB,YAAmBvB,KACpCuB,GAGL,IAgBI+a,EAhBE8V,EAAU,YAAcrG,GACxBsG,EAAa12B,KAAK01B,oBAAoB,CAC1C1uB,aAAc,YAAkB3C,GAAO2C,aACvCiqB,kBAAmBwF,EACnBV,aAAcU,EACdn2B,QAAS,OAAF,IAAE,CAAF,CACLkyB,MAAK,EACLnuB,MAAK,EACLusB,SAAQ,EACRhrB,UAAS,EACTwwB,UAAW,YAAmBxwB,GAC9BgjB,gBAAe,GACZ,YAAuBvkB,EAAOrE,KAAK8I,OAAOsE,cAKjD,GAAIspB,EAAW/V,UAKbA,EAAU,CAAC,IAAI,EACbgW,EAAaD,EAAW/V,SACxB+V,EAAW/V,QACXtc,EACAuB,KAEG2a,GACH,MAAMI,EAAQ,GAIlB,MAAO,CACLvhB,OAAQs3B,EAAWt3B,OACnBM,UAAWihB,EACXA,QAAO,IAIJ,YAAAiW,QAAP,SACEx3B,EACAiJ,EACArB,EACA1G,GAEA,GAAI4zB,EAAsB5zB,EAAQkyB,QAC9BxyB,KAAKu1B,aAAan0B,IAAIhC,KAAY4H,EAAc,CAClD,IAAMgX,EAAShe,KAAK01B,oBAAoBG,KACtC7uB,EACAqB,EACA/H,EAIAN,KAAKy1B,MAAMoB,QAAQz3B,IAErB,GAAI4e,GAAU5e,IAAW4e,EAAO5e,OAC9B,OAAO,EAGX,OAAO,GAID,YAAA42B,qBAAR,SAA6B,GAA7B,WACEhvB,EAAY,eACZiqB,EAAiB,oBACjB8E,EAAY,eACZz1B,EAAO,UAEP,GAAI,YAAY2wB,KACX3wB,EAAQswB,SAASgB,kBAAkBX,EAAkBthB,SACrDrP,EAAQkyB,MAAMrxB,IAAI8vB,EAAkBthB,OACvC,MAAO,CACLvQ,OAAQY,KAAKy1B,MAAMzI,MACnBrM,QAAS,wCAAiCsQ,EAAkBthB,MAAK,YAI7D,IAIJgR,EAJI/a,EAA+BtF,EAAtB,UAAEswB,EAAoBtwB,EAAZ,SACrBoS,EADiCpS,EAAL,MACX0wB,cAAsBC,EAAmB,cAE1D6F,EAAwC,GAExCC,EAAgB,IAAI,IAW1B,SAASC,EAAiB53B,EAAuB63B,G,MAI/C,OAHI73B,EAAOuhB,UACTA,EAAUoW,EAAc/M,MAAMrJ,IAAO,MAAKsW,GAAa73B,EAAOuhB,QAAO,KAEhEvhB,EAAOA,OAbZY,KAAK8I,OAAO0sB,aACQ,kBAAb9iB,IACNke,EAASsG,kBAAkBxkB,IAI9BokB,EAAetuB,KAAK,CAAEgK,WAAYE,IAUpC,IAAM3Q,EAAU,IAAIC,IAAIgF,EAAaC,YAErClF,EAAQR,SAAQ,SAAA4F,G,QAGd,GAAK,YAAcA,EAAWvB,GAE9B,GAAI,YAAQuB,GAAY,CACtB,IAAIsrB,EAAa7B,EAAS0B,UAAU,CAClCjhB,UAAWlK,EAAU3E,KAAK9B,MAC1BwJ,MAAO/C,EACPvB,UAAWtF,EAAQsF,UACnBjD,KAAMsuB,GACL3wB,GAEG22B,EAAa,YAAuB9vB,QAEvB,IAAfsrB,EACG,IAAsBroB,MAAMjD,KAC/BwZ,EAAUoW,EAAc/M,MAAMrJ,IAAO,MAClCsW,GAAa,4BACZ9vB,EAAU3E,KAAK9B,MAAK,gBAEpB,YAAYuwB,GACRA,EAAkBthB,MAAQ,UAC1B,UAAYqC,KAAKP,UAAUwf,EAAmB,KAAM,IAE1D,KAGK,YAAQwB,GACjBA,EAAauE,EAAc,EAAKX,wBAAwB,CACtDnsB,MAAO/C,EACPZ,MAAOksB,EACPsD,aAAY,EACZz1B,QAAO,IACL22B,GAEM9vB,EAAUH,aASG,MAAdyrB,IAITA,EAAauE,EAAc,EAAKtB,oBAAoB,CAClD1uB,aAAcG,EAAUH,aACxBiqB,kBAAmBwB,EACnBsD,aAAc,YAAYtD,GAAcA,EAAasD,EACrDz1B,QAAO,IACL22B,IAbA32B,EAAQsoB,kBACV6J,EAAa,EAAKgD,MAAM0B,KAAK1E,SAed,IAAfA,GACFqE,EAAetuB,OAAI,MAAIyuB,GAAaxE,EAAU,QAG3C,CACL,IAAMhlB,EAAW,YACftG,EACA7G,EAAQ82B,gBAGV,IAAK3pB,GAAYtG,EAAUP,OAAS,IAAKywB,gBACvC,MAAM,QAAI,QAAe,qBAAqB,OAAU,EAAU,KAAI,mBAGpE5pB,GAAYmjB,EAAS0G,gBAAgB7pB,EAAUiF,IACjDjF,EAASzG,aAAaC,WAAW1F,QAAQQ,EAAQU,IAAKV,OAK5D,IACMw1B,EAA0B,CAAEn4B,OADnB,OAAAgmB,EAAA,GAAe0R,GACYnW,QAAO,GAC3C6W,EAASl3B,EAAQsoB,gBACnB5oB,KAAKy1B,MAAMK,MAAMyB,GAGjB,OAAA70B,EAAA,GAAgB60B,GAQpB,OAJIC,EAAOp4B,QACTY,KAAKu1B,aAAaj0B,IAAIk2B,EAAOp4B,OAAQ4H,GAGhCwwB,GAID,YAAAlB,yBAAR,SAAiC,GAAjC,IAMM3V,EANN,OACEzW,EAAK,QACL3D,EAAK,QACLwvB,EAAY,eACZz1B,EAAO,UAGHy2B,EAAgB,IAAI,IAExB,SAASC,EAAiBS,EAA4Bh2B,G,MAIpD,OAHIg2B,EAAY9W,UACdA,EAAUoW,EAAc/M,MAAMrJ,IAAO,MAAKlf,GAAIg2B,EAAY9W,QAAO,KAE5D8W,EAAYr4B,OAwCrB,OArCI8K,EAAMlD,eACRT,EAAQA,EAAM8C,OAAO/I,EAAQkyB,MAAMrB,UAGrC5qB,EAAQA,EAAMiK,KAAI,SAACqX,EAAMpmB,GAEvB,OAAa,OAATomB,EACK,KAIL,YAAQA,GACHmP,EAAc,EAAKX,wBAAwB,CAChDnsB,MAAK,EACL3D,MAAOshB,EACPkO,aAAY,EACZz1B,QAAO,IACLmB,GAIFyI,EAAMlD,aACDgwB,EAAc,EAAKtB,oBAAoB,CAC5C1uB,aAAckD,EAAMlD,aACpBiqB,kBAAmBpJ,EACnBkO,aAAc,YAAYlO,GAAQA,EAAOkO,EACzCz1B,QAAO,IACLmB,IAGFS,SAyBV,SACEswB,EACAtoB,EACAuoB,GAEA,IAAKvoB,EAAMlD,aAAc,CACvB,IAAM,EAAU,IAAIhF,IAAI,CAACywB,IACzB,EAAQlxB,SAAQ,SAAAb,GACV,YAAgBA,KAClB,QACE,aAAC,YA
CD,+CACE,mBAAkC,EAAM,iCACb,OAAU,EAAM,KAC7C,uCACFH,OAAOqQ,OAAOlQ,GAAOa,QAAQ,EAAQkB,IAAK,QAvC1Ci1B,CAA6Bp3B,EAAQkyB,MAAOtoB,EAAO2d,GAG9CA,MAGF,CACLzoB,OAAQkB,EAAQsoB,gBAAkB5oB,KAAKy1B,MAAMK,MAAMvvB,GAASA,EAC5Doa,QAAO,IAGb,EAtYA,GAwYA,SAASgW,EAAagB,GACpB,IACE3lB,KAAKP,UAAUkmB,GAAM,SAACrZ,EAAG5d,GACvB,GAAqB,kBAAVA,EAAoB,MAAMA,EACrC,OAAOA,KAET,MAAOtB,GACP,OAAOA,G,6CCveLw4B,EAIDr3B,OAAOmB,OAAO,MAEnB,SAASm2B,EAAoBC,GAI3B,IAAMC,EAAW/lB,KAAKP,UAAUqmB,GAChC,OAAOF,EAAmBG,KACvBH,EAAmBG,GAAYx3B,OAAOmB,OAAO,OAG5C,SAAUs2B,EACdC,GAEA,IAAM/sB,EAAO2sB,EAAoBI,GAEjC,OAAO/sB,EAAKgtB,cAAgBhtB,EAAKgtB,YAAc,SAC7CjI,EACA3vB,GAEA,IAAM4tB,EACJ,SAACvrB,EAAMd,GAAQ,OAAAvB,EAAQgyB,UAAUzwB,EAAlB,IAEXs2B,EAAY73B,EAAQ63B,UAAYC,EACpCH,GACA,SAAAI,GACE,IAAIC,EAAYC,EACdj4B,EAAQqxB,YACR0G,EAIAnK,GA2BF,YAvBgB,IAAdoK,GACArI,IAAW3vB,EAAQqxB,aACnB,IAAO1wB,KAAKgvB,EAAQoI,EAAc,MAWlCC,EAAYC,EAAetI,EAAQoI,EAAeG,IAGpD,QACE,iBACA,IADS,EACT,kBAAkB,OAAAH,EAAuB,gDACvC,OAAK,KAAU,UAEjB,+BAEKC,KAIX,MAAO,UAAGh4B,EAAQoS,SAAQ,YAAIV,KAAKP,UAAU0mB,MAW3C,SAAUM,EAAuBR,GACrC,IAAM/sB,EAAO2sB,EAAoBI,GAEjC,OAAO/sB,EAAKwtB,YAAcxtB,EAAKwtB,UAAY,SAACpnB,EAAM,G,IAChDpH,EAAK,QACLtE,EAAS,YACTyL,EAAS,YAEHyB,EAAYslB,EAAsBH,GAAW,SAAAU,GACjD,IAAMC,EAAWD,EAAQ,GACnBE,EAAYD,EAASlS,OAAO,GAElC,GAAkB,MAAdmS,EA8BJ,GAAkB,MAAdA,GAaJ,GAAIvnB,EACF,OAAOinB,EAAejnB,EAAMqnB,OAd9B,CACE,IAAMG,EAAeF,EAASv3B,MAAM,GACpC,GAAIuE,GAAa,IAAO3E,KAAK2E,EAAWkzB,GAAe,CACrD,IAAMC,EAAaJ,EAAQt3B,MAAM,GAEjC,OADA03B,EAAW,GAAKD,EACTP,EAAe3yB,EAAWmzB,SAlCnC,GAAI7uB,GAAS,YAAgBA,EAAM3C,YAAa,CAC9C,IAAM,EAAgBqxB,EAASv3B,MAAM,GAI/B8I,EAAID,EAAM3C,WAAWyxB,MAAK,SAAA7uB,GAAK,OAAAA,EAAE3H,KAAK9B,QAAP,KAE/Bu4B,EAAgB9uB,GAAK,YAAyBA,EAAGvE,GAQvD,OAAOqzB,GAAiBV,EACtBU,EAIAN,EAAQt3B,MAAM,QA2BhB63B,EAASlnB,KAAKP,UAAUqB,GAW9B,OAJIxB,GAAmB,OAAX4nB,KACV7nB,GAAa,IAAM6nB,GAGd7nB,IAIL,SAAU+mB,EACdH,EACAkB,GAIA,IAAMC,EAAS,IAAI,IACnB,OAAOC,EAAkBpB,GAAW/lB,QAAO,SAACY,EAAWgX,G,MACjDwP,EAAUH,EAAUrP,GACxB,QAAgB,IAAZwP,EAAoB,CAGtB,IAAK,IAAI73B,EAAIqoB,EAAK/lB,OAAS,EAAGtC,GAAK,IAAKA,GAC/B,MAAMqoB,EAAKroB,IAAK63B,EAAvBA,EAA8B,EAEhCxmB,EAAYsmB,EAAOpP,MAAMlX,EAAWwmB,GAEtC,OAAOxmB,IACNvS,OAAOmB,OAAO,OAGb,SAAU23B,EAAkBvB,GAChC,IAAM5sB,EAAO2sB,EAAoBC,GAEjC,IAAK5sB,EAAKquB,MAAO,CACf,IAAM,EAAoBruB,EAAKquB,MAAQ,GACjC,EAAwB,GAE9BzB,EAAKv2B,SAAQ,SAACuQ,EAAGrQ,GACX,YAAQqQ,IACVunB,EAAkBvnB,GAAGvQ,SAAQ,SAAA8S,GAAK,SAAM7L,KAAK,EAAY2G,OAAvBkF,OAClC,EAAYtQ,OAAS,IAErB,EAAYyE,KAAKsJ,GACZ,YAAQgmB,EAAKr2B,EAAI,MACpB,EAAM+G,KAAK,EAAYnH,MAAM,IAC7B,EAAY0C,OAAS,OAM7B,OAAOmH,EAAKquB,MAGd,SAASf,EAGPvI,EAAcpuB,GACd,OAAOouB,EAAOpuB,GAGV,SAAU02B,EACdtI,EACAnG,EACAoE,GAcA,OADAA,EAAUA,GAAWsK,EACdgB,EAAU1P,EAAK5X,QAAO,SAASunB,EAAQx3B,EAAKJ,GACjD,OAAO,YAAQI,GACXA,EAAIuO,KAAI,SAAAhP,GAAS,OAAAi4B,EAAQj4B,EAAR,MACjBS,GAAOisB,EAASjsB,EAAKJ,KACxBouB,IAGL,SAASuJ,EAAa94B,GAIpB,OAAI,YAAgBA,GACd,YAAQA,GACHA,EAAM8P,IAAIgpB,GAEZpB,EACL73B,OAAOqB,KAAKlB,GAAO8Q,QACnB,SAAAsY,GAAQ,OAAAyO,EAAe73B,EAAf,MAGLA,ECzFT,SAASg5B,EAAuB5B,GAC9B,YAAqB,IAAdA,EAAKxmB,KAAkBwmB,EAAKxmB,KACjCwmB,EAAK5tB,MAAQ,YAAyB4tB,EAAK5tB,MAAO4tB,EAAKlyB,WAAa,KAtHxE,IAAgBiM,aAAa,KAoN7B,IAAM8nB,EAAqC,aACrCC,EAAmC,SAACC,EAAOv5B,GAAY,OAAAA,EAAA,WAIvDw5B,EACJ,SAAC9H,EAAUC,EAAU,GAAqB,OAAA8H,EAAP,gBAAoB/H,EAAUC,IAC7D+H,GAAwC,SAAC1b,EAAG2T,GAAa,OAAAA,GAM/D,cAsCE,WAAoBnpB,GAAA,KAAAA,SArCZ,KAAAmxB,aAYJ15B,OAAOmB,OAAO,MAEV,KAAAw4B,UAEJ35B,OAAOmB,OAAO,MAMV,KAAAy4B,aAAe,IAAIj5B,IAMnB,KAAAk5B,cAAgB,IAAIl5B,IAIZ,KAAAg2B,kBAA4C32B,OAAOmB,OAAO,MAC1D,KAAAkwB,kBAA4CrxB,OAAOmB,OAAO,MAE1D,KAAA24B,oBAAqB,EAQnCr6B,KAAK8I,OAAM,aACTwxB,iBAAkB,KACfxxB,GAGL9I,KAAKiL,MAAQjL,KAAK8I,OAAOmC,MAEzBjL,KAAKu6B,gBAAgB,SACrBv6B,KAAKu6B,gBAAg
B,YACrBv6B,KAAKu6B,gBAAgB,gBAEjBzxB,EAAO0xB,eACTx6B,KAAKy6B,iBAAiB3xB,EAAO0xB,eAG3B1xB,EAAOmxB,cACTj6B,KAAK06B,gBAAgB5xB,EAAOmxB,cAoiBlC,OAhiBS,YAAAhU,SAAP,SACEgK,EACA0K,G,MAEM/J,EAAW5wB,KAEX0S,EAAWioB,IACfA,EAAejoB,WACW,QAA1B,EAAAioB,EAAehJ,mBAAW,eAAEnf,cACzByd,EAAOzd,WAOZ,GAAIE,IAAa1S,KAAK4xB,kBAAkBgJ,WACtC,MAAO,CAAC,cAuBV,IAnBA,IAeIlrB,EAfEiiB,EAAcgJ,GAAkBA,EAAehJ,aAAe1B,EAE9D3vB,EAAO,2BACRq6B,GAAc,CACjBjoB,SAAQ,EACRif,YAAW,EACXW,UAAWqI,GAAkBA,EAAerI,WAAa,WACvD,IAAMxvB,EAAU+3B,GAA0B11B,UAAWwsB,GACrD,OAAOf,EAAS0B,UAAUxvB,EAAS,CACjC0vB,MAAO5B,EAAS3lB,MAAT,KACPrF,UAAW9C,EAAQ8C,eAOnBk1B,EAASpoB,GAAY1S,KAAK+6B,cAAcroB,GAC1CsoB,EAAQF,GAAUA,EAAOE,OAASh7B,KAAK8I,OAAOwxB,iBAC3CU,GAAO,CACZ,IAAMC,EAAgBD,EAAM/K,EAAQ3vB,GACpC,IAAI,YAAQ26B,GAEL,CACLvrB,EAAKurB,EACL,MAHAD,EAAQhD,EAAyBiD,GAQrC,OADAvrB,EAAKA,EAAKE,OAAOF,QAAM,EAChBpP,EAAQ63B,UAAY,CAACzoB,EAAIpP,EAAQ63B,WAAa,CAACzoB,IAGjD,YAAAgrB,gBAAP,SAAuBT,GAAvB,WACE15B,OAAOqB,KAAKq4B,GAAc14B,SAAQ,SAAAmR,GAChC,IAAM,EAKFunB,EAAavnB,GAJfwoB,EAAS,YACTC,EAAY,eACZC,EAAgB,mBACbnJ,EAAQ,cAJP,iDAqBFiJ,GAAW,EAAKX,gBAAgB,QAAS7nB,GACzCyoB,GAAc,EAAKZ,gBAAgB,WAAY7nB,GAC/C0oB,GAAkB,EAAKb,gBAAgB,eAAgB7nB,GAEvD,IAAOzR,KAAK,EAAKi5B,UAAWxnB,GAC9B,EAAKwnB,UAAUxnB,GAAUlK,KAAKypB,GAE9B,EAAKiI,UAAUxnB,GAAY,CAACuf,OAK1B,YAAAoJ,iBAAR,SAAyB3oB,EAAkBuf,GAA3C,WACQD,EAAWhyB,KAAK+6B,cAAcroB,GAC5B4oB,EAAsBrJ,EAAb,UAAE1hB,EAAW0hB,EAAL,OAEzB,SAASsJ,EACPvJ,EACAhI,GAEAgI,EAAShI,MACU,oBAAVA,EAAuBA,GAGpB,IAAVA,EAAiB8P,GAGP,IAAV9P,EAAkBgQ,GAClBhI,EAAShI,MAKbuR,EAASvJ,EAAUC,EAASjI,OAE5BgI,EAASgJ,OAEO,IAAdM,EAAsB3B,EAGtB,YAAQ2B,GAAatD,EAAyBsD,GAEzB,oBAAdA,EAA2BA,EAElCtJ,EAASgJ,MAEPzqB,GACFhQ,OAAOqB,KAAK2O,GAAQhP,SAAQ,SAAA8P,GAC1B,IAAM2gB,EAAW,EAAKwJ,eAAe9oB,EAAUrB,GAAW,GACpD4gB,EAAW1hB,EAAOc,GAExB,GAAwB,oBAAb4gB,EACTD,EAASnc,KAAOoc,MACX,CACG,IAAAkE,EAAyBlE,EAAlB,QAAEpc,EAAgBoc,EAAZ,KAAEjI,EAAUiI,EAAL,MAE5BD,EAASgJ,OAGK,IAAZ7E,EAAoByD,EAGpB,YAAQzD,GAAWsC,EAAuBtC,GAEvB,oBAAZA,EAAyBA,EAEhCnE,EAASgJ,MAES,oBAATnlB,IACTmc,EAASnc,KAAOA,GAGlB0lB,EAASvJ,EAAUhI,GAGjBgI,EAASnc,MAAQmc,EAAShI,QAM5BgI,EAASgJ,MAAQhJ,EAASgJ,OAASpB,OAMnC,YAAAW,gBAAR,SACEkB,EACA/oB,QAAA,IAAAA,MAAA,GAEA,IAAM0d,EAAS,QAAUqL,EAAM9U,cACzB+U,EAAM17B,KAAK4xB,kBAAkBxB,GAC/B1d,IAAagpB,IACf,QAAU,aAAW,GAAK,IAAO,wBAAsB,OAAK,iCAA8B,yBAGtFA,UAAY17B,KAAKk3B,kBAAkBwE,GAEvC17B,KAAKk3B,kBAAkBxkB,GAAY0d,EAEnCpwB,KAAK4xB,kBAAkBxB,GAAU1d,IAI9B,YAAA+nB,iBAAP,SAAwBD,GAAxB,WACGx6B,KAAKq6B,oBAAiC,EACvC95B,OAAOqB,KAAK44B,GAAej5B,SAAQ,SAAAo6B,GAIjC,EAAKC,gBAAgBD,GAAW,GAEhCnB,EAAcmB,GAAWp6B,SAAQ,SAAAs6B,GAC/B,EAAKD,gBAAgBC,GAAS,GAAOp5B,IAAIk5B,GACzC,IAAMG,EAAQD,EAAQC,MAAM,KACvBA,GAASA,EAAM,KAAOD,GAEzB,EAAKzB,cAAc94B,IAAIu6B,EAAS,IAAIE,OAAOF,WAM3C,YAAAd,cAAR,SAAsBroB,GAAtB,WACE,IAAK,IAAOzR,KAAKjB,KAAKi6B,aAAcvnB,GAAW,CAC7C,IAAM,EACJ1S,KAAKi6B,aAAavnB,GAAYnS,OAAOmB,OAAO,MAC9C,EAAO6O,OAAShQ,OAAOmB,OAAO,MAuB9B,IAAMs6B,EAAah8B,KAAKm6B,aAAa/4B,IAAIsR,GACrCspB,GAAcA,EAAW1hB,MAC3B0hB,EAAWz6B,SAAQ,SAAAo6B,GACjB,IAAM,EAAsB,EAAKZ,cAAcY,GAAvCprB,EAAM,SAAK0rB,EAAI,cAAjB,YACN17B,OAAOC,OAAO,EAAQy7B,GACtB17B,OAAOC,OAAO,EAAO+P,OAAQA,MAKnC,IAAM2rB,EAAQl8B,KAAKk6B,UAAUxnB,GAS7B,OARIwpB,GAASA,EAAMn4B,QAGjBm4B,EAAMC,OAAO,GAAG56B,SAAQ,SAAAu5B,GACtB,EAAKO,iBAAiB3oB,EAAUooB,MAI7B96B,KAAKi6B,aAAavnB,IAGnB,YAAA8oB,eAAR,SACE9oB,EACArB,EACA+qB,GAMA,GAAI1pB,EAAU,CACZ,IAAM2pB,EAAgBr8B,KAAK+6B,cAAcroB,GAAUnC,OACnD,OAAO8rB,EAAchrB,IACnB+qB,IAAoBC,EAAchrB,GAAa9Q,OAAOmB,OAAO,SAI3D,YAAAk6B,gBAAR,SACEC,EACAO,GAEA,IAAIE,EAAet8B,KAAKm6B,aAAa/4B,IAAIy6B,GAIzC,OAHKS,GAAgBF,GACnBp8B,KAAKm6B,aAAa74B,IAAIu6B,EAASS,EAAe,IAAIt6B,KAE7Cs6B,GAGF,YAAAhF,gBAAP,SACE7pB,EACAiF,EACAtT,EACAwG,GAJF,WAME,IAAK
6H,EAASyZ,cAAe,OAAO,EAIpC,IAAKxU,EAAU,OAAO,EAEtB,IAAMipB,EAAYluB,EAASyZ,cAAc1kB,KAAK9B,MAE9C,GAAIgS,IAAaipB,EAAW,OAAO,EAEnC,GAAI37B,KAAKq6B,oBACLr6B,KAAKm6B,aAAah5B,IAAIw6B,GAuBxB,IAtBA,IAAMY,EAAuBv8B,KAAK47B,gBAAgBlpB,GAAU,GACtD,EAAY,CAAC6pB,GACb,EAAe,SAACV,GACpB,IAAMS,EAAe,EAAKV,gBAAgBC,GAAS,GAC/CS,GACAA,EAAahiB,MACb,EAAU1I,QAAQ0qB,GAAgB,GACpC,EAAU9zB,KAAK8zB,IAUfE,KAA8Bp9B,IAAUY,KAAKo6B,cAAc9f,MAC3DmiB,GAAwB,EAInBh7B,EAAI,EAAGA,EAAI,EAAUsC,SAAUtC,EAAG,CACzC,IAAM66B,EAAe,EAAU76B,GAE/B,GAAI66B,EAAan7B,IAAIw6B,GAWnB,OAVKY,EAAqBp7B,IAAIw6B,KACxBc,GACF,SAAU,IAAK,0BAAqB,OAAQ,oBAAiB,OAAY,IAM3EF,EAAqB95B,IAAIk5B,KAEpB,EAGTW,EAAa/6B,QAAQ,GAEjBi7B,GAGA/6B,IAAM,EAAUsC,OAAS,GAKzB,YAA0B0J,EAASzG,aAAc5H,EAASwG,KAI5D42B,GAA2B,EAC3BC,GAAwB,EAMxBz8B,KAAKo6B,cAAc74B,SAAQ,SAACm7B,EAAQC,GAClC,IAAMb,EAAQppB,EAASopB,MAAMY,GACzBZ,GAASA,EAAM,KAAOppB,GACxB,EAAaiqB,OAOvB,OAAO,GAGF,YAAAtK,WAAP,SAAkB3f,EAA8BrB,GAC9C,IAAMypB,EAAS96B,KAAKw7B,eAAe9oB,EAAUrB,GAAW,GACxD,SAAUypB,IAAUA,EAAOE,QAGtB,YAAApI,kBAAP,SAAyBgK,GACf,IAEJ1L,EAFIxe,EAAwBkqB,EAAhB,SAAEvrB,EAAcurB,EAAL,UACrB9B,EAAS96B,KAAKw7B,eAAe9oB,EAAUrB,GAAW,GAGpD2pB,EAAQF,GAAUA,EAAOE,MAC7B,GAAIA,GAAStoB,EAQX,IAPA,IAAMpS,EAA0C,CAC9CoS,SAAQ,EACRrB,UAAS,EACTnH,MAAO0yB,EAAU1yB,OAAS,KAC1BtE,UAAWg3B,EAAUh3B,WAEjB0L,EAAOooB,EAAuBkD,GAC7B5B,GAAO,CACZ,IAAM6B,EAAoB7B,EAAM1pB,EAAMhR,GACtC,IAAI,YAAQu8B,GAEL,CAGL3L,EAAiB2L,GAAqBxrB,EACtC,MALA2pB,EAAQvC,EAAuBoE,GAkBrC,YARuB,IAAnB3L,IACFA,EAAiB0L,EAAU1yB,MACvB,YAAsB0yB,EAAU1yB,MAAO0yB,EAAUh3B,WACjD,YAAgByL,EAAWqoB,EAAuBkD,MAKjC,IAAnB1L,EACK7f,EAMFA,IAAc,YAAuB6f,GACxCA,EACA7f,EAAY,IAAM6f,GAGjB,YAAAoB,UAAP,SACExvB,EACAxC,GAEA,IAAM2wB,EAAoBnuB,EAAQH,KAClC,GAAKsuB,IAEenuB,EAAQoH,OAASpH,EAAQuO,WAC7C,CAEA,QAAyB,IAArBvO,EAAQ4P,SAAqB,CAC/B,IAAMA,EAAWpS,EAAQkyB,MAAMxB,cAAsBC,EAAmB,cACpEve,IAAU5P,EAAQ4P,SAAWA,GAGnC,IAAMwe,EAAiBlxB,KAAK4yB,kBAAkB9vB,GACxCuO,EAAY,YAAuB6f,GACnCc,EAAW1xB,EAAQkyB,MAAMxB,cAAiBC,EAAmBC,GAC7D4J,EAAS96B,KAAKw7B,eAAe14B,EAAQ4P,SAAUrB,GAAW,GAC1DwE,EAAOilB,GAAUA,EAAOjlB,KAE9B,GAAIA,EAAM,CACR,IAAMinB,EAAcC,GAClB/8B,KACAixB,EACAnuB,EACAxC,EACAA,EAAQkyB,MAAMG,WACZ,YAAY1B,GACRA,EAAkBthB,MAClBshB,EACJC,IAKJ,OAAO,IAAUvJ,UACf3nB,KAAKiL,MACL4K,EACA,CAACmc,EAAU8K,IAIf,OAAO9K,IAGF,YAAAgL,gBAAP,SACEtqB,EACArB,GAEA,IAAMypB,EAAS96B,KAAKw7B,eAAe9oB,EAAUrB,GAAW,GACxD,OAAOypB,GAAUA,EAAOjlB,MAGnB,YAAAonB,iBAAP,SACEC,EACA7rB,EACA8rB,GAEA,IAAIrC,EAIF96B,KAAKw7B,eAAe0B,EAAgB7rB,GAAW,GAC7C2Y,EAAQ8Q,GAAUA,EAAO9Q,MAK7B,OAJKA,GAASmT,IAEZnT,GADA8Q,EAAS96B,KAAK+6B,cAAcoC,KACVrC,EAAO9Q,OAEpBA,GAGF,YAAAoT,iBAAP,SACEpL,EACAC,EACA,EACA3xB,EACAoyB,G,IAFExoB,EAAK,QAAEwI,EAAQ,WAAEsX,EAAK,QAIxB,OAAIA,IAAU8P,EAILuD,GACL/8B,EAAQkyB,MADH6K,CAELrL,EACAC,GAGAjI,IAAUgQ,GAEL/H,GAOL3xB,EAAQ8pB,YACV4H,OAAW,GAGNhI,EAAMgI,EAAUC,EAAU8K,GAC/B/8B,UAYA,EACA,CAAE0S,SAAQ,EACRrB,UAAWnH,EAAM1H,KAAK9B,MACtBwJ,MAAK,EACLtE,UAAWtF,EAAQsF,WACrBtF,EACAoyB,GAAWnyB,OAAOmB,OAAO,UAG/B,EAhmBA,GAkmBA,SAASq7B,GACPnM,EACAK,EACA2L,EACAt8B,EACAoyB,GAEA,IAAMxB,EAAiBN,EAASgC,kBAAkBgK,GAC5CvrB,EAAY,YAAuB6f,GACnCtrB,EAAYg3B,EAAUh3B,WAAatF,EAAQsF,UAC3C,EAA2BtF,EAAQkyB,MAAjCnB,EAAW,cAAEF,EAAO,UAE5B,MAAO,CACL7f,KAAMooB,EAAuBkD,GAC7B1yB,MAAO0yB,EAAU1yB,OAAS,KAC1BmH,UAAS,EACT6f,eAAc,EACdtrB,UAAS,EACTiK,YAAW,IACXwhB,YAAW,EACXqB,QAAO,EACPznB,MAAO2lB,EAAS3lB,MAChBkmB,QAAO,EACPmB,UAAA,WACE,OAAO1B,EAAS0B,UACduI,GAA0B11B,UAAW8rB,EAAmBrrB,GACxDtF,IAGJy5B,aAAcsD,GAAyB/8B,EAAQkyB,QAI7C,SAAUqI,GACdyC,EACArM,EACArrB,GAGE,IAKE9C,EALCyvB,EAGD+K,EAHmB,GAClB36B,EAED26B,EAFK,GACCC,EACND,EADU,OAgCd,MA3BkC,kBAAvB/K,EACTzvB,EAAU,CACRuO,UAAWkhB,EAIX5vB,KAAM46B,EAAO,EAAI56B,EAAOsuB,IAG1BnuB,EAAU,OAAH,IAAG,CAAH,GAA
QyvB,GAGV,IAAOtxB,KAAK6B,EAAS,UACxBA,EAAQH,KAAOsuB,IAIf/uB,cAA4B,IAAjBY,EAAQH,MACrB,SAAU,IAAK,4DACb,OCx7BA,SAA8BjC,GAClC,IAAM88B,EAAU,OAAAxpB,EAAA,GAAa,uBAC7B,OAAOhC,KAAKP,UAAU/Q,GAAO,SAACmB,EAAKnB,GACjC,YAAiB,IAAVA,EAAmB88B,EAAU98B,KACnCsW,MAAMhF,KAAKP,UAAU+rB,IAAU/uB,KAAK,eDo7BnCgvB,CAA+B,WAAAH,WAI/B,IAAWx6B,EAAQ8C,YACrB9C,EAAQ8C,UAAYA,GAGf9C,EAGT,SAASu6B,GACP7K,GAEA,OAAO,SAAsBR,EAAUC,GACrC,GAAI,YAAQD,IAAa,YAAQC,GAC/B,MAAM,QAAI,QAAe,qCAAqC,WAOhE,GAAI,YAAgBD,IAChB,YAAgBC,GAAW,CAC7B,IAAMyL,EAAQlL,EAAMxB,cAAcgB,EAAU,cACtC2L,EAAQnL,EAAMxB,cAAciB,EAAU,cAG5C,GAFoByL,GAASC,GAASD,IAAUC,EAG9C,OAAO1L,EAGT,GAAI,YAAYD,IACZ,YAAwBC,GAK1B,OADAO,EAAMxI,MAAMgI,EAASriB,MAAOsiB,GACrBD,EAGT,GAAI,YAAwBA,IACxB,YAAYC,GAMd,OADAO,EAAMxI,MAAMgI,EAAUC,EAAStiB,OACxBsiB,EAGT,GAAI,YAAwBD,IACxB,YAAwBC,GAC1B,kCAAYD,GAAaC,GAI7B,OAAOA,GEz6BX,SAAS2L,GACPt9B,EACAu9B,EACAC,GAEA,IAAMj8B,EAAM,UAAGg8B,GAAU,OAAGC,GACxBC,EAAWz9B,EAAQ09B,QAAQ58B,IAAIS,GAWnC,OAVKk8B,GACHz9B,EAAQ09B,QAAQ18B,IAAIO,EAAKk8B,EACvBz9B,EAAQu9B,aAAeA,GACvBv9B,EAAQw9B,WAAaA,EACnBx9B,EAAS,2BACRA,GAAO,CACVu9B,WAAU,EACVC,SAAQ,KAGLC,EAWT,kBACE,WACkB9yB,EACR2K,EACAxI,GAFQ,KAAAnC,QACR,KAAA2K,SACA,KAAAxI,YA+jBZ,OA5jBS,YAAA6wB,aAAP,SAAoBzL,EAAwB,GAA5C,WACEnuB,EAAK,QACLjF,EAAM,SACNkxB,EAAM,SACN1qB,EAAS,YACTwkB,EAAS,YAEH8T,EAAsB,YAAuB75B,GAC7C+0B,EAAS,cAEfxzB,EAAY,OAAH,IAAG,CAAH,eACJ,YAAiBs4B,IACjBt4B,GAGL,IAAMtF,EAAO,yBACXkyB,MAAK,EACL2L,QAAS59B,OAAOmB,OAAO,MACvBsoB,MAAA,SAASgI,EAAaC,GACpB,OAAOmH,EAAOpP,MAAMgI,EAAUC,IAEhCrsB,UAAS,EACTwwB,UAAW,YAAmBxwB,IAC3B,YAAuBvB,EAAOrE,KAAKoN,YAAU,CAChDgd,YAAaA,EACbgU,aAAc,IAAIl9B,IAClB28B,YAAY,EACZC,UAAU,EACVE,QAAS,IAAI98B,MAGTiC,EAAMnD,KAAKq+B,oBAAoB,CACnCj/B,OAAQA,GAAUmB,OAAOmB,OAAO,MAChC4uB,OAAM,EACNtpB,aAAck3B,EAAoBl3B,aAClCs3B,UAAW,CAAE9tB,IAAK,IAAItP,KACtBZ,QAAO,IAGT,IAAK,YAAY6C,GACf,MAAM,QAAI,QAAe,6BAA6B,OAAK,KAAU,UAAW,gBAkElF,OA7DA7C,EAAQ89B,aAAa78B,SAAQ,SAAC,EAA0C+uB,G,IAAxCqB,EAAW,cAAE2M,EAAS,YAAEC,EAAY,eAC5DC,EAAY,YAAclO,GAEhC,GAAIgO,GAAaA,EAAU9tB,IAAI8J,KAAM,CACnC,IAAMmkB,EAAU,EAAKC,YAAYJ,EAAWE,EAAW7M,EAAarxB,GACpE,GAAI,YAAYm+B,GAId,OAIF9M,EAAc8M,EAGhB,GAAIv8B,UAAY5B,EAAQ8pB,UAAW,CACjC,IAAM,EAAgD7pB,OAAOmB,OAAO,MACpE68B,EAAah9B,SAAQ,SAAA2I,GACfA,EAAMlD,eACR,EAAwBkD,EAAM1H,KAAK9B,QAAS,MAchDH,OAAOqB,KAAK+vB,GAAapwB,SAAQ,SAAA2vB,IAVT,SAACA,GACvB,OAEM,IAFN,EACE,YAAuBA,KAarB,CAAgBA,KAVG,SAACA,GACxB,IAAMyN,EAAYL,GAAaA,EAAU9tB,IAAIpP,IAAI8vB,GACjD,OAAOxiB,QAAQiwB,GAAaA,EAAUzzB,MAAQyzB,EAAUzzB,KAAK8e,OASxD,CAAiBkH,IAmjBhC,SACE0N,EACAC,EACA3N,EACAsB,GAEA,IAAMsM,EAAW,SAAC1N,GAChB,IAAM5vB,EAAQgxB,EAAMxB,cAA2BI,EAAUF,GACzD,MAAwB,kBAAV1vB,GAAsBA,GAGhCwwB,EAAW8M,EAASF,GAC1B,IAAK5M,EAAU,OAEf,IAAMC,EAAW6M,EAASD,GAC1B,IAAK5M,EAAU,OAIf,GAAI,YAAYD,GAAW,OAI3B,GAAI,YAAMA,EAAUC,GAAW,OAK/B,GAAI1xB,OAAOqB,KAAKowB,GAAU9qB,OACxB,SAAArF,GAAO,YAAuC,IAAvC2wB,EAAMxB,cAAciB,EAAUpwB,MACrC,OAGF,IAAMk9B,EACJvM,EAAMxB,cAAsB4N,EAAa,eACzCpM,EAAMxB,cAAsB6N,EAAa,cACrCxtB,EAAY,YAAuB6f,GACnC8N,EAAc,UAAGD,EAAU,YAAI1tB,GAErC,GAAI4tB,GAAS99B,IAAI69B,GAAc,OAC/BC,GAASx8B,IAAIu8B,GAEb,IAAME,EAA2B,GAG5B,YAAQlN,IACR,YAAQC,IACX,CAACD,EAAUC,GAAU1wB,SAAQ,SAAAC,GAC3B,IAAMkR,EAAW8f,EAAMxB,cAAcxvB,EAAO,cACpB,kBAAbkR,GACNwsB,EAAerjB,SAASnJ,IAC3BwsB,EAAe12B,KAAKkK,MAK1B,SAAU,IACZ,kDAA6C,OAAS,kBAAe,OAAU,iFAG7E,OAAAwsB,EAAqB,OACjB,qCACEA,EAAezwB,KAAK,SAAW,8CACjC,GAAE,kDAENuwB,EAAW,mFAGChtB,KAAKP,UAAUugB,GAAU3wB,MAAM,EAAG,KAAK,yBACvC2Q,KAAKP,UAAUwgB,GAAU5wB,MAAM,EAAG,KAAK,mRAvnBzC89B,CACEX,EACA7M,EACAT,EACA5wB,EAAQkyB,UAMhBA,EAAMxI,MAAMsG,EAAQqB,MAQtBa,EAAMY,OAAOjwB,EAAIwM,OAEVxM,GAGD,YAAAk7B,oBAAR,SAA4B,GAA5B,WACE/N,EAAM,SACNlxB,EAAM,SACN4H,EAAY,eACZ1G,EAAO,UAGPg+B,EAAS,YAED
1N,EAAa5wB,KAAKiL,MAAV,SAIZgnB,EAAwB1xB,OAAOmB,OAAO,MAKpCgR,EACH4d,GAAUM,EAASgB,kBAAkBtB,IACtC,YAAsBlxB,EAAQ4H,EAAc1G,EAAQyG,cACnDupB,GAAUhwB,EAAQkyB,MAAMpxB,IAAIkvB,EAAQ,cAEnC,kBAAoB5d,IACtBuf,EAASzf,WAAaE,GAWxB,IAAM4f,EAA+B,WACnC,IAAMxvB,EAAU+3B,GACd11B,UACA8sB,EACA3xB,EAAQsF,WAGV,GAAI,YAAY9C,EAAQH,MAAO,CAC7B,IAAMuI,EAAO5K,EAAQ89B,aAAah9B,IAAI0B,EAAQH,KAAKgN,OACnD,GAAIzE,EAAM,CACR,IAAM,EAAS0lB,EAAS0B,UAAS,2BAC5BxvB,GAAO,CACVH,KAAMuI,EAAKymB,cACVrxB,GAEH,QAAe,IAAX,EACF,OAAO,GAKb,OAAOswB,EAAS0B,UAAUxvB,EAASxC,IAG/Bi+B,EAAe,IAAIv8B,IAEzBhC,KAAKo/B,cACHp4B,EACA5H,EAIAkB,EACAoS,GACAnR,SAAQ,SAACjB,EAAS4J,G,MACZm1B,EAAiB,YAAuBn1B,GACxCxJ,EAAQtB,EAAOigC,GAIrB,GAFAd,EAAa97B,IAAIyH,QAEH,IAAVxJ,EAAkB,CACpB,IAAMwwB,EAAiBN,EAASgC,kBAAkB,CAChDlgB,SAAQ,EACRrB,UAAWnH,EAAM1H,KAAK9B,MACtBwJ,MAAK,EACLtE,UAAWtF,EAAQsF,YAGf+4B,EAAYW,GAAkBhB,EAAWpN,GAE3CmE,EAAgB,EAAKkK,kBACvB7+B,EACAwJ,EAGAA,EAAMlD,aACF42B,GAAiBt9B,GAAS,GAAO,GACjCA,EACJq+B,GAMExB,OAAa,EAIbjzB,EAAMlD,eACL,YAAYquB,IACZ,YAAwBA,MAC3B8H,EAAgB7K,EAAkB,aAAc+C,IAGlD,IAAMrL,EAAQ4G,EAASqM,iBACrBvqB,EACAxI,EAAM1H,KAAK9B,MACXy8B,GAGEnT,EACF2U,EAAUzzB,KAAO,CAEfhB,MAAK,EACLwI,SAAQ,EACRsX,MAAK,GAGPwV,GAA2BlB,EAAWpN,GAGxCe,EAAW3xB,EAAQ0pB,MAAMiI,IAAQ,MAC9Bf,GAAiBmE,EAClB,SAGFnzB,SACC5B,EAAQu9B,YACRv9B,EAAQw9B,UACR,IAAsB1zB,MAAMF,IAI5B0mB,EAASoM,gBAAgBtqB,EAAUxI,EAAM1H,KAAK9B,QAE/C,SAAU,IAAM,wBACd,mBAA6B,8BAEzB,OAAC,KAAU,UAAQ,EACtB,SAAW,UAAS,WAM3B,IACQ,MAAkBkwB,EAAS3K,SAAS7mB,EAAQ,CAChDsT,SAAQ,EACR1L,aAAY,EACZD,YAAazG,EAAQyG,YACrB4qB,YAAaM,EACbK,UAAS,IALJ5iB,EAAE,KAAEyoB,EAAS,KAUpB7H,EAASA,GAAU5gB,EAIfyoB,IAEFlG,EAAW3xB,EAAQ0pB,MAAMiI,EAAUkG,IAErC,MAAOv4B,GAEP,IAAK0wB,EAAQ,MAAM1wB,EAGrB,GAAI,kBAAoB0wB,EAAQ,CAC9B,IAAMmP,EAAU,YAAcnP,GAOxBoP,EAAOp/B,EAAQ69B,QAAQ7N,KAAYhwB,EAAQ69B,QAAQ7N,GAAU,IACnE,GAAIoP,EAAK9tB,QAAQ5K,IAAiB,EAAG,OAAOy4B,EAQ5C,GAPAC,EAAKl3B,KAAKxB,GAONhH,KAAK4V,QAAU5V,KAAK4V,OAAOghB,QAC7Bx3B,EACAqgC,EACAz4B,EACA1G,GAEA,OAAOm/B,EAGT,IAAM,EAAWn/B,EAAQ89B,aAAah9B,IAAIkvB,GAgB1C,OAfI,GACF,EAASqB,YAAcrxB,EAAQ0pB,MAAM,EAAS2H,YAAaM,GAC3D,EAASqM,UAAYqB,GAAgB,EAASrB,UAAWA,GACzDC,EAAah9B,SAAQ,SAAA2I,GAAS,SAASq0B,aAAa97B,IAAtB,OAE9BnC,EAAQ89B,aAAa98B,IAAIgvB,EAAQ,CAC/BqB,YAAaM,EAIbqM,UAAWsB,GAAiBtB,QAAa,EAASA,EAClDC,aAAY,IAITkB,EAGT,OAAOxN,GAGD,YAAAsN,kBAAR,SACE7+B,EACAwJ,EACA5J,EACAg+B,GAJF,WAME,OAAKp0B,EAAMlD,cAA0B,OAAVtG,EAOvB,YAAQA,GACHA,EAAM8P,KAAI,SAACqX,EAAMpmB,GACtB,IAAMf,EAAQ,EAAK6+B,kBACjB1X,EAAM3d,EAAO5J,EAASg/B,GAAkBhB,EAAW78B,IAErD,OADA+9B,GAA2BlB,EAAW78B,GAC/Bf,KAIJV,KAAKq+B,oBAAoB,CAC9Bj/B,OAAQsB,EACRsG,aAAckD,EAAMlD,aACpB1G,QAAO,EACPg+B,UAAS,IAhBFp8B,QAAU,OAAArB,EAAA,GAAUH,GAASA,GAsBhC,YAAA0+B,cAAR,SASEp4B,EACA5H,EACAkB,EACAoS,QAAA,IAAAA,MAAW,YAAsBtT,EAAQ4H,EAAc1G,EAAQyG,cAE/D,IAAM84B,EAAW,IAAI3+B,IACb0vB,EAAa5wB,KAAKiL,MAAV,SAEV60B,EAAe,IAAI,KAUtB,GAsFH,OApFA,SAAUC,EAER/4B,EACAg5B,GAEA,IAAMC,EAAcH,EAAarO,OAC/BzqB,EAKAg5B,EAAiBnC,WACjBmC,EAAiBlC,UAEfmC,EAAYC,UAChBD,EAAYC,SAAU,EAEtBl5B,EAAaC,WAAW1F,SAAQ,SAAA4F,GAC9B,GAAK,YAAcA,EAAW7G,EAAQsF,WAAtC,CAEM,IAAAi4B,EAAyBmC,EAAf,WAAElC,EAAakC,EAAL,SA0B1B,GArBInC,GAAcC,IAChB,YAAgB32B,EAAUI,aAE1BJ,EAAUI,WAAWhG,SAAQ,SAAAmG,GAC3B,IAAMlF,EAAOkF,EAAIlF,KAAK9B,MAEtB,GADa,WAAT8B,IAAmBq7B,GAAa,GACvB,UAATr7B,EAAkB,CACpB,IAAM8O,EAAO,YAAyB5J,EAAKpH,EAAQsF,WAK9C0L,IAA0C,IAAjCA,EAA0B6uB,KACtCrC,GAAW,OAQf,YAAQ32B,GAAY,CACtB,IAAM6qB,EAAW6N,EAASz+B,IAAI+F,GAC1B6qB,IAIF6L,EAAaA,GAAc7L,EAAS6L,WACpCC,EAAWA,GAAY9L,EAAS8L,UAGlC+B,EAASv+B,IACP6F,EACAy2B,GAAiBt9B,EAASu9B,EAAYC,QAGnC,CACL,IAAMrwB,EAAW,YACftG,EACA7G,EAAQ82B,gBAGV,IAAK3pB,GAAYtG,EAAUP,OAAS,IAAKywB,gBACvC,MAAM,QAAI,QAAe,qBAAqB,OAAU,EAAU,KAAI,mBAGpE5pB,GACAmjB,
EAAS0G,gBACP7pB,EAAUiF,EAAUtT,EAAQkB,EAAQsF,YAExCm6B,EACEtyB,EAASzG,aACT42B,GAAiBt9B,EAASu9B,EAAYC,UA7EhD,CAkFG92B,EAAc1G,GAEVu/B,GAGD,YAAAnB,YAAR,SACEJ,EACAtM,EACAC,EACA3xB,EACA8/B,G,MALF,OAOE,GAAI9B,EAAU9tB,IAAI8J,OAAS,YAAY2X,GAAW,CAChD,IA6BI,EA7BE,EAIH,YAAQA,KAIR,YAAYD,KAAa,YAAwBA,QACrC,EAAXA,EAKE,EAAIC,EAMN,IAAMmO,IACRA,EAAiB,CAAC,YAAY,GAAK,EAAEzwB,MAAQ,IAU/C,IAAM,EAAW,SACfhN,EACAH,GAEA,OAAO,YAAQG,GACM,kBAATH,EAAoBG,EAAKH,QAAQ,EACzClC,EAAQkyB,MAAMxB,cAAcruB,EAAMiN,OAAOpN,KAG/C87B,EAAU9tB,IAAIjP,SAAQ,SAACo9B,EAAWzN,GAChC,IAAMmP,EAAO,EAAS,EAAGnP,GACnBoP,EAAO,EAAS,EAAGpP,GAEzB,QAAI,IAAWoP,EAAf,CACIF,GACFA,EAAe53B,KAAK0oB,GAEtB,IAAMqP,EAAO,EAAK7B,YAChBC,EACA0B,EACAC,EACAhgC,EACA8/B,GAEEG,IAASD,IACX,EAAgB,GAAiB,IAAIp/B,KACvBI,IAAI4vB,EAAgBqP,GAEhCH,GACF,YAAUA,EAAeI,QAAUtP,OAInC,IAEFe,EAAY,YAAQ,GAAK,EAAE5wB,MAAM,GAAI,eAAM,GAC3C,EAAcE,SAAQ,SAACb,EAAO8B,GAC3ByvB,EAAiBzvB,GAAQ9B,MAKhC,OAAI49B,EAAUpzB,KACLlL,KAAKiL,MAAM2lB,SAASwM,iBACzBpL,EACAC,EACAqM,EAAUpzB,KACV5K,EACA8/B,IAAkB,EAAA9/B,EAAQkyB,OAAMG,WAAU,QAAIyN,IAI3CnO,GAEX,EAnkBA,GAqkBMwO,GAAkC,GAExC,SAASnB,GACP,EACA98B,G,IADEgO,EAAG,MAML,OAHKA,EAAIrP,IAAIqB,IACXgO,EAAIlP,IAAIkB,EAAMi+B,GAAmBD,OAAS,CAAEhwB,IAAK,IAAItP,MAEhDsP,EAAIpP,IAAIoB,GAGjB,SAASm9B,GACPe,EACAC,GAEA,GAAID,IAASC,IAAUA,GAASf,GAAiBe,GAAQ,OAAOD,EAChE,IAAKA,GAAQd,GAAiBc,GAAO,OAAOC,EAE5C,IAAMz1B,EAAOw1B,EAAKx1B,MAAQy1B,EAAMz1B,KAAM,2BACjCw1B,EAAKx1B,MACLy1B,EAAMz1B,MACPw1B,EAAKx1B,MAAQy1B,EAAMz1B,KAEjB01B,EAAkBF,EAAKlwB,IAAI8J,MAAQqmB,EAAMnwB,IAAI8J,KAI7C9V,EAAS,CAAE0G,KAAI,EAAEsF,IAHXowB,EAAkB,IAAI1/B,IAChCw/B,EAAKlwB,IAAI8J,KAAOomB,EAAKlwB,IAAMmwB,EAAMnwB,KAInC,GAAIowB,EAAiB,CACnB,IAAM,EAAqB,IAAI5+B,IAAI2+B,EAAMnwB,IAAI5O,QAE7C8+B,EAAKlwB,IAAIjP,SAAQ,SAACs/B,EAAUh/B,GAC1B2C,EAAOgM,IAAIlP,IACTO,EACA89B,GAAgBkB,EAAUF,EAAMnwB,IAAIpP,IAAIS,KAE1C,EAAmByK,OAAOzK,MAG5B,EAAmBN,SAAQ,SAAAM,GACzB2C,EAAOgM,IAAIlP,IACTO,EACA89B,GACEgB,EAAMnwB,IAAIpP,IAAIS,GACd6+B,EAAKlwB,IAAIpP,IAAIS,QAMrB,OAAO2C,EAGT,SAASo7B,GAAiBjI,GACxB,OAAQA,KAAUA,EAAKzsB,MAAQysB,EAAKnnB,IAAI8J,MAG1C,SAASklB,GACP,EACAh9B,G,IADEgO,EAAG,MAGCmuB,EAAYnuB,EAAIpP,IAAIoB,GACtBm8B,GAAaiB,GAAiBjB,KAChC8B,GAAmBj4B,KAAKm2B,GACxBnuB,EAAIlE,OAAO9J,IAIf,IAAMy8B,GAAW,IAAIj9B,ICjtBrB,mBAwBE,WAAY8G,QAAA,IAAAA,MAAA,IAAZ,MACE,cAAO,K,OApBD,EAAAg4B,QAAU,IAAI9+B,IAGd,EAAA++B,sBAAwB,IAAI7/B,IAcpB,EAAAuK,QAAU,IA+UlB,EAAAu1B,QAAU,EA3UhB,EAAKl4B,OAAS,YAAgBA,GAC9B,EAAK0sB,cAAgB,EAAK1sB,OAAO0sB,YAEjC,EAAK5E,SAAW,IAAI,GAAS,CAC3B3lB,MAAO,EACPqvB,iBAAkB,EAAKxxB,OAAOwxB,iBAC9BE,cAAe,EAAK1xB,OAAO0xB,cAC3BP,aAAc,EAAKnxB,OAAOmxB,eAG5B,EAAK3R,O,EAqfT,OAzhBmC,iBAuCzB,YAAAA,KAAR,WAIE,IAAM2Y,EAAYjhC,KAAK2D,KAAO,IAAI,EAAYixB,KAAK,CACjDhE,SAAU5wB,KAAK4wB,SACfwD,cAAep0B,KAAK8I,OAAOsrB,gBAQ7Bp0B,KAAKkhC,eAAiBD,EAAU3M,MAEhCt0B,KAAKmhC,oBAGC,YAAAA,iBAAR,SAAyBC,GAAzB,WACQC,EAAiBrhC,KAAKshC,YACpBl0B,EAAcpN,KAAK8I,OAAV,UAKjB9I,KAAKuhC,YAAc,IAAI,GACrBvhC,KACAA,KAAKshC,YAAc,IAAI,EAAY,CACjCr2B,MAAOjL,KACPw1B,YAAax1B,KAAKw1B,YAClBU,mBAAoBl2B,KAAK8I,OAAOotB,mBAChCtN,gBAAiB,YAAsB5oB,KAAK8I,QAC5C2sB,MAAO2L,OACH,EACAC,GAAkBA,EAAe5L,MACrCroB,UAAS,IAEXA,GAGFpN,KAAKwhC,oBAAsB,aAAK,SAC9BzS,EACAjsB,GAEA,OAAO,EAAK2+B,eAAe1S,EAAGjsB,KAC7B,CACDmzB,IAAKj2B,KAAK8I,OAAOotB,mBACjBvC,aAAc,SAAC5E,GAGb,IAAMyD,EAAQzD,EAAExM,WAAa,EAAK2e,eAAiB,EAAKv9B,KACxD,GAAIuwB,EAAsB1B,GAAQ,CACxB,IAAAjQ,EAA8BwM,EAApB,WAAErf,EAAkBqf,EAAhB,GAAEnpB,EAAcmpB,EAAL,UACjC,OAAOyD,EAAMmB,aACX5E,EAAE1qB,MAOF0qB,EAAEnS,SACF,YAAmB,CAAE2F,WAAU,EAAE7S,GAAE,EAAE9J,UAAS,SAStD,IAAI5D,IAAI,CACNhC,KAAK2D,KAAKktB,MACV7wB,KAAKkhC,eAAerQ,QACnBtvB,SAAQ,SAAAsvB,GAAS,OAAAA,EAAA,mBAGf,YAAAxB,QAAP,SAAe1rB,GAMb,OALA3D,KAAKsoB,OAID3k
B,GAAM3D,KAAK2D,KAAKyL,QAAQzL,GACrB3D,MAGF,YAAAkuB,QAAP,SAAe3L,GACb,YADa,IAAAA,OAAA,IACLA,EAAaviB,KAAKkhC,eAAiBlhC,KAAK2D,MAAMuqB,WAGjD,YAAArY,KAAP,SAAe/S,GASX,MACEA,EADuB,kBAAzByd,OAAiB,IAAG,GAAK,EAE3B,IACE,OAAOvgB,KAAKshC,YAAY9K,sBAAqB,2BACxC1zB,GAAO,CACV0vB,MAAO1vB,EAAQyf,WAAaviB,KAAKkhC,eAAiBlhC,KAAK2D,KACvDmF,OAAQ9I,KAAK8I,OACbyX,kBAAiB,KAChBnhB,QAAU,KACb,MAAOQ,GACP,GAAIA,aAAa,EAMf,OAAO,KAET,MAAMA,IAIH,YAAAywB,MAAP,SAAavtB,GACX,IAEE,QADE9C,KAAKghC,QACAhhC,KAAKuhC,YAAYtD,aAAaj+B,KAAK2D,KAAMb,GAFlD,UAIS9C,KAAKghC,UAAiC,IAAtBl+B,EAAQgJ,WAC7B9L,KAAKuM,qBAKJ,YAAA4jB,OAAP,SAAcrtB,GACZ,GAAI,IAAO7B,KAAK6B,EAAS,QAAUA,EAAQ4M,GAUzC,OAAO,EAET,IAAM8iB,EAAQ1vB,EAAQyf,WAClBviB,KAAKkhC,eACLlhC,KAAK2D,KACT,IAEE,QADE3D,KAAKghC,QACAxO,EAAMrC,OAAOrtB,EAAQ4M,IAAM,aAAc5M,EAAQyN,QAF1D,UAISvQ,KAAKghC,UAAiC,IAAtBl+B,EAAQgJ,WAC7B9L,KAAKuM,qBAKJ,YAAA8T,KAAP,SACEvd,GAEA,OAAO9C,KAAKshC,YAAY9K,sBAAqB,2BACxC1zB,GAAO,CACV0vB,MAAO1vB,EAAQyf,WAAaviB,KAAKkhC,eAAiBlhC,KAAK2D,KACvDysB,OAAQttB,EAAQ4M,IAAM,aACtB5G,OAAQ9I,KAAK8I,WAIV,YAAA4Z,MAAP,SACEA,GADF,WAoBE,OAjBK1iB,KAAK8gC,QAAQxmB,MAWhB,YAAYta,MAEdA,KAAK8gC,QAAQr+B,IAAIigB,GACbA,EAAMgf,WACR1hC,KAAKwhC,oBAAoB9e,GAEpB,WAID,EAAKoe,QAAQx0B,OAAOoW,KAAW,EAAKoe,QAAQxmB,MAC9C,YAAY,GAKd,EAAKknB,oBAAoBG,OAAOjf,KAI7B,YAAAwN,GAAP,SAAUptB,GASR,IAAmBoB,QACnB,IAAMovB,EAAMtzB,KAAKkhC,eAAehR,KAQhC,OAPIptB,IAAY9C,KAAKghC,UACfl+B,EAAQq+B,iBACVnhC,KAAKmhC,iBAAiBr+B,EAAQs+B,uBACrBt+B,EAAQs+B,uBACjBphC,KAAKshC,YAAY/K,cAGdjD,GAUF,YAAAF,OAAP,SAAchD,EAAgB7N,GAC5B,OAAQA,EAAaviB,KAAKkhC,eAAiBlhC,KAAK2D,MAAMyvB,OAAOhD,IAQxD,YAAAiD,QAAP,SAAejD,EAAgB7N,GAC7B,OAAQA,EAAaviB,KAAKkhC,eAAiBlhC,KAAK2D,MAAM0vB,QAAQjD,IASzD,YAAAnK,SAAP,SAAgBgK,GACd,GAAI,YAAYA,GAAS,OAAOA,EAAOtgB,MACvC,IACE,OAAO3P,KAAK4wB,SAAS3K,SAASgK,GAAQ,GACtC,MAAOrwB,GACP,SAAU,IAAQ,UAIf,YAAAizB,MAAP,SAAa/vB,GACX,IAAKA,EAAQ4M,GAAI,CACf,GAAI,IAAOzO,KAAK6B,EAAS,MAGvB,OAAO,EAETA,EAAU,OAAH,IAAG,CAAH,eAAQA,GAAO,CAAE4M,GAAI,eAE9B,IASE,QAJE1P,KAAKghC,QAIAhhC,KAAKkhC,eAAerO,MAAM/vB,EAAS9C,KAAK2D,MATjD,UAWS3D,KAAKghC,UAAiC,IAAtBl+B,EAAQgJ,WAC7B9L,KAAKuM,qBAKJ,YAAArI,MAAP,SAAapB,GAAb,WAqBE,OApBA9C,KAAKsoB,OAEL,IAAmBpkB,QAEfpB,GAAWA,EAAQ8rB,gBAGrB5uB,KAAK8gC,QAAQv/B,SAAQ,SAAAmhB,GAAS,SAAK8e,oBAAoBG,OAAzB,MAC9B3hC,KAAK8gC,QAAQ70B,QACb,YAAYjM,OAQZA,KAAKuM,mBAGA+I,QAAQC,WAGV,YAAAqsB,iBAAP,SAAwBC,GACtB,IAAMC,EAAoB9hC,KAAKkhC,eAAevM,YAAYkN,GACtDC,IAAsB9hC,KAAKkhC,iBAC7BlhC,KAAKkhC,eAAiBY,EACtB9hC,KAAKuM,qBAMF,YAAA6V,MAAP,SACEtf,GADF,IAUM6sB,EAVN,OAIItN,EAIEvf,EAJI,OACN,EAGEA,EAHe,WAAjByf,OAAU,IAAG,GAAI,EACjBqf,EAEE9+B,EAFc,iBAChB,EACEA,EADY,eAIVi/B,EAAU,SAACC,GACT,MAA2B,EAAzBr+B,EAAI,OAAEu9B,EAAc,mBAC1B,EAAKF,QACHgB,IACF,EAAKr+B,KAAO,EAAKu9B,eAAiBc,GAEpC,IACE,OAAOrS,EAAetN,EAAO,GAD/B,UAGI,EAAK2e,QACP,EAAKr9B,KAAOA,EACZ,EAAKu9B,eAAiBA,IAIpBe,EAAe,IAAIjgC,IAwEzB,OAtEI,IAAmBhC,KAAKghC,SAU1BhhC,KAAKuM,iBAAgB,2BAChBzJ,GAAO,CACV2f,eAAc,SAACC,GAEb,OADAuf,EAAax/B,IAAIigB,IACV,MAKa,kBAAfH,EAITviB,KAAKkhC,eAAiBlhC,KAAKkhC,eAAe1M,SAASjS,EAAYwf,IACvC,IAAfxf,EAMTwf,EAAQ/hC,KAAK2D,MAIbo+B,IAG8B,kBAArBH,IACT5hC,KAAKkhC,eAAiBlhC,KAAKkhC,eAAevM,YAAYiN,IAMpD,GAAkBK,EAAa3nB,MACjCta,KAAKuM,iBAAgB,2BAChBzJ,GAAO,CACV2f,eAAc,SAACC,EAAOrC,GACpB,IAAMjhB,EAAS,EAAe6B,KAAKjB,KAAM0iB,EAAOrC,GAOhD,OANe,IAAXjhB,GAIF6iC,EAAa31B,OAAOoW,GAEftjB,MAKP6iC,EAAa3nB,MACf2nB,EAAa1gC,SAAQ,SAAAmhB,GAAS,SAAK8e,oBAAoB31B,MAAzB,OAMhC7L,KAAKuM,iBAAiBzJ,GAGjB6sB,GAGF,YAAAxF,mBAAP,SACE9H,EACAuN,GAEA,OAAO5vB,KAAKoiB,MAAM,CAChBC,OAAM,EACNE,WAAYqN,GAAkC,OAAjBA,KAI1B,YAAAG,kBAAP,SAAyBrlB,GACvB,GAAI1K,KAAKw1B,YAAa,CACpB,IAAIp2B,EAASY,KAAK+gC,sBAAsB3/B,IAAIsJ,GAS5C,OARKtL,IACHA,EAAS,YAAsBsL,GAC/B1K,KAAK+gC,s
BAAsBz/B,IAAIoJ,EAAUtL,GAIzCY,KAAK+gC,sBAAsBz/B,IAAIlC,EAAQA,IAElCA,EAET,OAAOsL,GAGF,YAAAslB,iBAAP,SAAwBtlB,GACd,IAAA0C,EAAcpN,KAAK8I,OAAV,UACjB,OAAOsE,EACHA,EAAUyS,UAAUnV,GACpBA,GAGI,YAAA6B,iBAAV,SAA2BzJ,GAA3B,WACO9C,KAAKghC,SACRhhC,KAAK8gC,QAAQv/B,SAAQ,SAAAwtB,GAAK,SAAKyS,oBAAoBzS,EAAzB,OAUtB,YAAA0S,eAAR,SACE1S,EACAjsB,GAEQ,IAAAylB,EAAawG,EAAL,SAQV1O,EAAOrgB,KAAKqgB,KAAU0O,GAExBjsB,IACEisB,EAAExM,YAC4B,kBAAvBzf,EAAQyf,aACjBlC,EAAK4I,2BAA4B,GAG/BnmB,EAAQ2f,iBACiD,IAAzD3f,EAAQ2f,eAAexhB,KAAKjB,KAAM+uB,EAAG1O,EAAMkI,KAO5CA,GAAa,YAAMA,EAASnpB,OAAQihB,EAAKjhB,SAC5C2vB,EAAEnS,SAASmS,EAAExG,SAAWlI,EAAMkI,IAGpC,EAzhBA,CAAmC,I,sIC5B/B2Z,GAA6B,EAmBpBC,EANI,EAAK,sBAM+C,SACnEjjC,EACAkjC,EACAC,GAMA,IAAM3hC,EAAQ0hC,IAGZlgC,UACCggC,GAGDxhC,IAAU0hC,MAEVF,GAA6B,EAE7B,SAAU,IACR,+EAkBE,MAAwB,WAAe,CAACI,KAAM,CAAC5hC,MAAK,EAAE0hC,YAAW,KAA/DE,EAAI,UAAGr9B,EAAW,KAiD1B,OA5CIwQ,EAAA,EAKF,mBAAsB,WACpBlV,OAAOC,OAAO8hC,EAAM,CAAE5hC,MAAK,EAAE0hC,YAAW,IAKpCG,EAAuBD,IAEzBr9B,EAAY,CAACq9B,KAAI,MAElB,CAACpjC,EAAWwB,EAAO0hC,IAEtB7hC,OAAOC,OAAO8hC,EAAM,CAAE5hC,MAAK,EAAE0hC,YAAW,IAG1C,aAAgB,WASd,OANIG,EAAuBD,IAEzBr9B,EAAY,CAACq9B,KAAI,IAIZpjC,GAAU,WAQXqjC,EAAuBD,IAEzBr9B,EAAY,CAACq9B,KAAI,SAGpB,CAACpjC,IAEGwB,GAGT,SAAS6hC,EAAiC,G,IACxC7hC,EAAK,QACL0hC,EAAW,cAKX,IACE,OAAO1hC,IAAU0hC,IACjB,SACA,OAAO,G,0FCzFP,EAEA7hC,OAAM,UAFQ,eAIZ,SAAUmE,EAIdL,EACAvB,GAEA,YAFA,IAAAA,MAA+CvC,OAAOmB,OAAO,OAEtD8gC,EACL,OAAAC,EAAA,GAAgB3/B,EAAQ5C,QACxBmE,GACAK,SAAS5B,GAGP,SAAU0/B,EACdtiC,EACAmE,GAEA,IAAMq+B,EAAW,mBAEdA,EAASp/B,SACVpD,IAAWwiC,EAASp/B,QAAQpD,QAC5BmE,IAAUq+B,EAASp/B,QAAQe,QAE3Bq+B,EAASp/B,QAAU,IAAI,EAAcpD,EAAQmE,EAAOq+B,EAASp/B,UAE/D,IAAMwqB,EAAQ4U,EAASp/B,QAQjB,EAAmB,mBAAS,GAApBq/B,GAAF,KAAS,MAKrB,OAJA7U,EAAM7oB,YAAc,WAClB09B,GAAQ,SAAAC,GAAQ,OAAAA,EAAA,MAGX9U,EAGT,iBACE,WACkB5tB,EACAmE,EAChB0N,GAFgB,KAAA7R,SACA,KAAAmE,QA2BV,KAAAw+B,gBAAkB,IAAI7gC,IAItB,KAAA8gC,oBAAsB,IAAKrtB,EAAA,EAAgBstB,QAAU/gC,KAqPrD,KAAAghC,kBAAoB,OAAAtgC,EAAA,GAAgB,CAC1CO,SAAS,EACTU,UAAM,EACNlE,WAAO,EACPiN,cAAe,IAAczJ,UAGvB,KAAAggC,kBAAoB,OAAAvgC,EAAA,GAAgB,CAC1CO,SAAS,EACTU,UAAM,EACNlE,WAAO,EACPiN,cAAe,IAAcyT,QAkKvB,KAAA+iB,mBAAqB,IAAKztB,EAAA,EAAgB1K,QAAU7J,KA9b1D,YAAmBmD,EAAO,IAAa8+B,OAIvC,IAAMC,EAAiBrxB,GAAYA,EAAS3S,OACtCikC,EAAeD,GAAkBA,EAAez/B,KAClD0/B,IACFrjC,KAAKqjC,aAAeA,GA6e1B,OAzeE,YAAAp+B,YAAA,WAEE,SAAU,IAAK,2EAGjB,YAAAK,YAAA,sBACE,OAAO,IAAIgQ,SAAwC,SAAAC,GACjD,EAAKstB,gBAAgBpgC,IAAI8S,GACzB,EAAKutB,oBAAoBrgC,IAAI,EAAK6gC,mBAClC,EAAKr+B,kBAeT,YAAAP,SAAA,SAAS5B,GAAT,WAQE9C,KAAKujC,eAAiB,qBAAW,eAAoBA,eAErDvjC,KAAKwjC,WAAW1gC,GAEhB,IAAMgiB,EAAW9kB,KAAKyjC,qBAEhBrkC,EAAS+iC,EACb,uBAAY,WACV,GAAI,EAAKoB,eACP,OAAO,aAGT,IAAMG,EAAS,WACb,IAAMN,EAAiB,EAAKhkC,OAItBA,EAAS0lB,EAAS/E,mBAGtBqjB,GACAA,EAAengC,UAAY7D,EAAO6D,SAClCmgC,EAAe12B,gBAAkBtN,EAAOsN,eACxC,YAAM02B,EAAez/B,KAAMvE,EAAOuE,OAKpC,EAAKT,UAAU9D,IAwCbqG,EAAeqf,EAAS5lB,UAAUwkC,GArCtB,SAAV5jC,EAAWL,GACf,IAAM0f,EAAO2F,EAAQ,KACrBrf,EAAa5F,cAQb,IACEilB,EAAS5D,mBACTzb,EAAeqf,EAAS5lB,UAAUwkC,EAAQ5jC,GAF5C,QAIEglB,EAAQ,KAAW3F,EAGrB,IAAK,EAAele,KAAKxB,EAAO,iBAE9B,MAAMA,EAGR,IAAM2jC,EAAiB,EAAKhkC,SAEzBgkC,GACAA,GAAkBA,EAAengC,UACjC,YAAMxD,EAAO2jC,EAAe3jC,SAE7B,EAAKyD,UAAU,CACbS,KAAOy/B,GAAkBA,EAAez/B,KACxClE,MAAOA,EACPwD,SAAS,EACTyJ,cAAe,IAAcjN,WAOnC,OAAO,WAAM,OAAAgG,EAAA,iBACZ,CAODqf,EACA9kB,KAAKujC,eACLvjC,KAAKE,OAAO+sB,yBAGd,WAAM,+BACN,WAAM,+BAIRjtB,KAAK2jC,2BAA2BvkC,GAEhC,IAAMmG,EAAcvF,KAAK4jC,cAAcxkC,GAOvC,OALKmG,EAAYtC,SAAWjD,KAAK6iC,gBAAgBvoB,OAC/Cta,KAAK6iC,gBAAgBthC,SAAQ,SAAAgU,GAAW,OAAAA,EAAA,MACxCvV,KAAK6iC,gBAAgB52B,SAGhB1G,GAWD,YAAAi+B,WAAR,SACE1gC,G,MAEMwgC,EAAoBtjC,KAAK6jC,wBAC7B7jC,KAAK8jC,iBAAm
BhhC,GAMpBihC,EAA2B/jC,KAAKsjC,mBASpCtjC,KAAK8iC,oBAAoB3hC,IAAI4iC,IAC5B,YAAMT,EAAmBS,KAE1B/jC,KAAKsjC,kBAAoBA,EAErBS,GAA4B/jC,KAAK6E,aAMnC7E,KAAK8iC,oBAAoBx2B,OAAOy3B,GAUhC/jC,KAAK6E,WAAWua,UAAUpf,KAAKgkC,sBAK/BhkC,KAAKqjC,cAA0B,QAAX,EAAArjC,KAAKZ,cAAM,eAAEuE,OAAQ3D,KAAKqjC,aAC9CrjC,KAAKZ,YAAS,IAUlBY,KAAKgE,YAAclB,EAAQkB,aAAeigC,EAAcrjC,UAAUoD,YAClEhE,KAAKF,QAAUgD,EAAQhD,SAAWmkC,EAAcrjC,UAAUd,SAGvDE,KAAKujC,iBAAkBvjC,KAAKE,OAAO+sB,wBACN,IAA9BjtB,KAAK8jC,iBAAiBI,KACrBlkC,KAAK8jC,iBAAiBn/B,KAMvB3E,KAAK8jC,iBAAiBn/B,MACiB,YAAvC3E,KAAKsjC,kBAAkBl+B,YAYvBpF,KAAKZ,OAASY,KAAKijC,kBAEnBjjC,KAAKZ,SAAWY,KAAKgjC,mBACrBhjC,KAAKZ,SAAWY,KAAKijC,oBAErBjjC,KAAKZ,YAAS,GApBdY,KAAKZ,OAASY,KAAKgjC,mBAwBf,YAAAgB,mBAAR,WACE,IAAM1K,EAEF,GAEE6K,EAAiBnkC,KAAKE,OAAO6S,eAAema,WAsBlD,OArBIiX,GAAgB7K,EAAQ9wB,KAAK27B,GAE7BnkC,KAAK8jC,iBAAiB/wB,gBACxBumB,EAAQ9wB,KAAKxI,KAAK8jC,iBAAiB/wB,gBAarCumB,EAAQ9wB,KAAK,OAAAmF,EAAA,GACX3N,KAAK6E,YAAc7E,KAAK6E,WAAW/B,QACnC9C,KAAKsjC,oBAGAhK,EAAQpnB,OACbic,EAAA,IAmBI,YAAA0V,wBAAR,SAAgC,G,WAAA,cAC9B,IAAAl/B,EAAI,OAQDy/B,GAPA,MACQ,cACJ,UACO,iBAIC,cATe,0DAaxBd,EACJ/iC,OAAOC,OAAO4jC,EAAc,CAAE//B,MAAOrE,KAAKqE,QAkB5C,IAfErE,KAAKujC,gBAE+B,iBAAlCD,EAAkBl+B,aACgB,sBAAlCk+B,EAAkBl+B,cAKpBk+B,EAAkBl+B,YAAc,eAG7Bk+B,EAAkB19B,YACrB09B,EAAkB19B,UAAY,IAG5BjB,EAAM,CAEN,MAEE2+B,EAFwC,YAA1Cl+B,OAAW,IAAG,EAAApF,KAAK8E,wBAAuB,EAC1C,EACEw+B,EAD8B,mBAAhC1+B,OAAkB,IAAG,EAAAQ,EAAW,EAMlC7E,OAAOC,OAAO8iC,EAAmB,CAC/B1+B,mBAAkB,EAClBQ,YAAa,iBAELk+B,EAAkBl+B,cAC5Bk+B,EAAkBl+B,aACD,QAAf,EAAApF,KAAK6E,kBAAU,eAAE/B,QAAQ8B,qBACzB5E,KAAK8E,yBAGT,OAAOw+B,GAGT,YAAAx+B,sBAAA,W,QACE,OACsC,QAApC,EAAA9E,KAAK8jC,iBAAiB/wB,sBAAc,eAAE3N,eACD,QAArC,EAAApF,KAAKE,OAAO6S,eAAema,kBAAU,eAAE9nB,cACvC,eAOI,YAAApB,YAAR,SAAoBL,KACZ,YAAA7D,QAAR,SAAgBL,KAQR,YAAAgkC,mBAAR,WAIE,IAAM3e,EAAW9kB,KAAK6E,WACpB7E,KAAKujC,gBACAvjC,KAAKujC,eAAec,iBAAiBrkC,KAAKsjC,oBAC1CtjC,KAAK6E,YACL7E,KAAKE,OAAOgtB,WAAWltB,KAAKgkC,sBAEnChkC,KAAKskC,eAAiB,mBAAQ,WAAM,MAAC,CACnCjjB,QAASyD,EAASzD,QAAQ7hB,KAAKslB,GAC/B1F,UAAW0F,EAAS1F,UAAU5f,KAAKslB,GACnCpD,UAAWoD,EAASpD,UAAUliB,KAAKslB,GACnCxC,YAAawC,EAASxC,YAAY9iB,KAAKslB,GACvC3B,aAAc2B,EAAS3B,aAAa3jB,KAAKslB,GACzCzB,YAAayB,EAASzB,YAAY7jB,KAAKslB,GACvCjC,gBAAiBiC,EAASjC,gBAAgBrjB,KAAKslB,MAC7C,CAACA,IAEL,IAAMyf,KAC0B,IAA9BvkC,KAAK8jC,iBAAiBI,KACtBlkC,KAAK8jC,iBAAiBn/B,MAYxB,OATI3E,KAAKujC,gBAAkBgB,IACzBvkC,KAAKujC,eAAeiB,sBAAsB1f,GAEtCA,EAAS/E,mBAAmB9c,SAE9BjD,KAAKujC,eAAekB,0BAA0B3f,IAI3CA,GAQD,YAAA5hB,UAAR,SAAkBwhC,GAChB,IAAMtB,EAAiBpjC,KAAKZ,OACxBgkC,GAAkBA,EAAez/B,OACnC3D,KAAKqjC,aAAeD,EAAez/B,MAErC3D,KAAKZ,OAASslC,EAGd1kC,KAAKiF,cACLjF,KAAK2kC,uBAAuBD,IAGtB,YAAAC,uBAAR,SAA+BvlC,GAA/B,WACOA,EAAO6D,SAEVqS,QAAQC,UAAUzR,MAAK,WACjB1E,EAAOK,MACT,EAAKK,QAAQV,EAAOK,OACXL,EAAOuE,MAChB,EAAKK,YAAY5E,EAAOuE,SAEzBM,OAAM,SAAAxE,GACP,SAAU,IAAW,KAAC,OAKpB,YAAAsgB,iBAAR,WASE,OALK/f,KAAKZ,QACRY,KAAK2kC,uBACH3kC,KAAKZ,OAASY,KAAK6E,WAAWkb,oBAG3B/f,KAAKZ,QAWd,YAAAwkC,cAAA,SACExkC,GAEA,IAAImG,EAAcvF,KAAKkjC,mBAAmB9hC,IAAIhC,GAC9C,GAAImG,EAAa,OAAOA,EAEhB,IAAA5B,EAA2CvE,EAAvC,KAAcwlC,GAAyBxlC,EAA9B,QAAyB,YAAKA,EAA7C,qBAoBN,OAnBAY,KAAKkjC,mBAAmB5hC,IAAIlC,EAAQmG,EAAc,OAAH,IAAG,CAAH,yBAC7C5B,KAAI,GACDihC,GACA5kC,KAAKskC,gBAAc,CACtBpkC,OAAQF,KAAKE,OACb2E,WAAY7E,KAAK6E,WACjBe,UAAW5F,KAAK6E,WAAWe,UAC3B5C,QAAShD,KAAK8jC,iBAAiBn/B,KAC/B0+B,aAAcrjC,KAAKqjC,iBAGhB99B,EAAY9F,OAAS,YAAgBL,EAAOC,UAK/CkG,EAAY9F,MAAQ,IAAI,IAAY,CAAEH,cAAeF,EAAOC,UAGvDkG,GAGD,YAAAo+B,2BAAR,SAAmCvkC,IAO/BA,EAAOohB,UACPxgB,KAAK8jC,iBAAiBrjB,gBACrBrhB,EAAO6D,SACN7D,EAAOuE,MAA4C,IAApCpD,OAAOqB,KAAKxC,EAAOuE,MAAMI,QACF,eAAxC/D,KAAK6E,WAAW/B,QAAQsC,cAExB7E,OAAOC,OAAOpB,
EAAQ,CACpB6D,SAAS,EACTyJ,cAAe,IAAc2U,UAE/BrhB,KAAK6E,WAAWwc,YAGtB,EA1fA,I,uFCpEM,SAAU8M,EAGd0W,EACA/hC,GAEA,OAAO,YAAQ+hC,EAAU/hC,EAASA,EAAQ8C,WAAa,CACrDA,UAAW,OAAF,IAAE,CAAF,eACHi/B,GAAYA,EAASj/B,WACtB9C,EAAQ8C,e,qJCoDjB,IAsJIk/B,EACAC,EAvJJ,0BAGU,KAAAC,MAAQ,IAAK,IAAgBjC,QAAU/gC,KAGvC,KAAAijC,KAAO,IAAI,IAIhB,KAQK,KAAAC,OAAS,IAAIn6B,QAiGb,KAAAo6B,WAAa,IAAIjkC,IAGT,KAAA8rB,MAAQhtB,KAAK81B,MAAM,IACrC,OA3GS,YAAAe,QAAP,SAAen2B,GACb,OAAO,YAAgBA,IAAUV,KAAKglC,MAAM7jC,IAAIT,IAO3C,YAAAy2B,KAAP,SAAYz2B,GACV,GAAI,YAAgBA,GAAQ,CAC1B,IAAMyR,EAtFZ,SAAwBzR,GACtB,OAAI,YAAgBA,GACX,YAAQA,GACXA,EAAMW,MAAM,GACb,aAAGiO,UAAW/O,OAAOoB,eAAejB,IAAWA,GAE7CA,EAgFU0kC,CAAY1kC,GAEzB,OADAV,KAAKklC,OAAO5jC,IAAI6Q,EAAMzR,GACfyR,EAET,OAAOzR,GAKF,YAAAo1B,MAAP,SAAap1B,GAAb,WACE,GAAI,YAAgBA,GAAQ,CAC1B,IAAMunB,EAAWjoB,KAAKklC,OAAO9jC,IAAIV,GACjC,GAAIunB,EAAU,OAAOA,EAGrB,OADc1nB,OAAOoB,eAAejB,IAElC,KAAKsL,MAAMpL,UACT,GAAIZ,KAAKglC,MAAM7jC,IAAIT,GAAQ,OAAOA,EAClC,IAAM6F,EAAgB7F,EAAgB8P,IAAIxQ,KAAK81B,MAAO91B,MActD,OAVMmI,EAAOnI,KAAKilC,KAAKpR,YAAYttB,IACzBA,QACRvG,KAAKglC,MAAMviC,IAAI0F,EAAK5B,MAAQA,GAIxBrE,SACF3B,OAAO6B,OAAOmE,IAGX4B,EAAK5B,MAGd,KAAK,KACL,KAAKhG,OAAOK,UACV,GAAIZ,KAAKglC,MAAM7jC,IAAIT,GAAQ,OAAOA,EAClC,IAAM,EAAQH,OAAOoB,eAAejB,GAC9B,EAAQ,CAAC,GACTkB,EAAO5B,KAAKqlC,WAAW3kC,GAC7B,EAAM8H,KAAK5G,EAAK0jC,MAChB,IAYMn9B,EAZA,EAAkB,EAAMpE,OAa9B,GAZAnC,EAAK2jC,OAAOhkC,SAAQ,SAAAM,GAClB,EAAM2G,KAAK,EAAKstB,MAAOp1B,EAAcmB,UAUjCsG,EAAOnI,KAAKilC,KAAKpR,YAAY,IACzB5D,OAAQ,CAChB,IAAM,EAAM9nB,EAAK8nB,OAAS1vB,OAAOmB,OAAO,GACxC1B,KAAKglC,MAAMviC,IAAI,GACfb,EAAK2jC,OAAOhkC,SAAQ,SAACM,EAAKJ,GACxB,EAAII,GAAO,EAAM,EAAkBJ,MAKjCS,SACF3B,OAAO6B,OAAO,GAGlB,OAAO+F,EAAK8nB,QAIlB,OAAOvvB,GAOD,YAAA2kC,WAAR,SAAmBpjC,GACjB,IAAML,EAAOrB,OAAOqB,KAAKK,GACnBkG,EAAOnI,KAAKilC,KAAKpR,YAAYjyB,GACnC,IAAKuG,EAAKvG,KAAM,CACdA,EAAK4P,OACL,IAAM8zB,EAAOtzB,KAAKP,UAAU7P,IACtBuG,EAAKvG,KAAO5B,KAAKmlC,WAAW/jC,IAAIkkC,KACpCtlC,KAAKmlC,WAAW7jC,IAAIgkC,EAAMn9B,EAAKvG,KAAO,CAAE2jC,OAAQ3jC,EAAM0jC,KAAI,IAG9D,OAAOn9B,EAAKvG,MAQhB,EAvHA,GAiIa4jC,EAAqBjlC,OAAOC,QAAO,SAAUE,GACxD,GAAI,YAAgBA,GAAQ,MACH,IAAnBokC,GACFW,IAEF,IAAMC,EAAYZ,EAAehP,MAAMp1B,GACnC4kC,EAAOP,EAAe3jC,IAAIskC,GAO9B,YANa,IAATJ,GACFP,EAAezjC,IACbokC,EACAJ,EAAOtzB,KAAKP,UAAUi0B,IAGnBJ,EAET,OAAOtzB,KAAKP,UAAU/Q,KACrB,CACDwD,MAAOuhC,IAOT,SAASA,IACPX,EAAiB,IAAIa,EACrBZ,EAAiB,IAAK,IAAgBh6B,QAAU7J,O,gCCrO5C,SAAUyN,EAASi3B,GACvB,IAAM,OAAOA,IAAU,WADzB,mC,+BCUA,wdAkBkBC,EACdtlC,OAAOK,UADa,eAGlB,SAAUklC,EAAUplC,GACxB,OAAiB,OAAVA,QAA4B,IAAVA,EAGpB,IAAMkM,EAAmDZ,MAAMY,QAEhE,SAAUm5B,EACd,EACAzlC,G,IADEkS,EAAU,aAAE9C,EAAE,KAAEs2B,EAAG,MAGrB,GAA0B,kBAAfxzB,IACLlS,IACFA,EAAQ63B,UACL2N,EAAUp2B,GACVo2B,EAAUE,QACX,EADkB,CAAEA,IAAG,GADN,CAAEt2B,GAAE,IAMrBo2B,EAAUp2B,KAAQo2B,EAAUE,KAC9Bt2B,EAAKs2B,IAGFF,EAAUp2B,IACb,MAAO,UAAG8C,EAAU,YACJ,kBAAP9C,GACO,kBAAPA,EACLA,EAAKsC,KAAKP,UAAU/B,IAK9B,IAAMu2B,EAAgB,CACpB3L,iBAAkByL,EAClBvQ,aAAa,EACbpB,eAAe,EAGfxL,iBAAiB,GAGb,SAAUsd,EAAgBp9B,GAC9B,OAAO,YAAQm9B,EAAen9B,GAG1B,SAAUq9B,EACdr9B,GAEA,IAAMpI,EAAQoI,EAAO8f,gBACrB,YAAiB,IAAVloB,EAAmBulC,EAAcrd,gBAAkBloB,EAGtD,SAAU0lC,EACd5T,EACAvB,GAEA,OAAO,YAAYA,GACfuB,EAAMpxB,IAAI6vB,EAAkBthB,MAAO,cACnCshB,GAAqBA,EAAkBze,WAGtC,IAAM6zB,EAAwB,qBAE/B,SAAUC,EAAuBpV,GACrC,IAAM4K,EAAQ5K,EAAe4K,MAAMuK,GACnC,OAAOvK,EAAQA,EAAM,GAAK5K,EAGtB,SAAUqV,EACdv/B,EACA5H,EACAwG,GAEA,QAAI,YAAgBxG,KACXwN,EAAQxN,GACXA,EAAO8H,OAAM,SAAA2gB,GAAQ,OAAA0e,EAA0Bv/B,EAAc6gB,EAAxC,MACrB7gB,EAAaC,WAAWC,OAAM,SAAAgD,GAC9B,GAAI,YAAQA,IAAU,YAAcA,EAAOtE,GAAY,CACrD,IAAM/D,EAAM,YAAuBqI,GACnC,OAAO27B,EAAO5kC,KAAK7B,EAAQyC,MACvBqI,EAAMlD,cACPu/B,EAA0Br8B,EAAMlD,aAAc5H,EAAOyC,GAAM+D,IAOhE
,OAAO,MAMT,SAAU4gC,EACd9lC,GAEA,OAAO,YAAgBA,KACpB,YAAYA,KACZkM,EAAQlM,GAGP,SAAU+lC,IACd,OAAO,IAAI,IAGP,SAAUC,EACdh8B,EACA0C,GAOA,IAAMrG,EAAc,YAAkB,YAAuB2D,IAC7D,MAAO,CACL3D,YAAW,EACXqwB,eAAA,SAAe50B,GACb,IAAIoH,EAAqC7C,EAAYvE,GAIrD,OAHKoH,GAAOwD,IACVxD,EAAMwD,EAAUqkB,OAAOjvB,IAElBoH,GAAO,S,gCC1JpB,wEASY+8B,EATZ,QASA,SAAYA,GACV,qBACA,2BACA,mCAHF,CAAYA,MAAY,KAYxB,IAAM17B,EAAQ,IAAI/J,IAEZ,SAAU0W,EAAcgvB,GAC5B,IAAIpkC,EACJ,OAAQokC,GACN,KAAKD,EAAaxD,MAChB3gC,EAAO,QACP,MACF,KAAKmkC,EAAa5jC,SAChBP,EAAO,WACP,MACF,KAAKmkC,EAAahhC,aAChBnD,EAAO,eAGX,OAAOA,EAyFP,SAAM,EAA2B,EAAE,GACnC,IAAM,EAtFF,SAAiBkI,GACrB,IAGI9E,EAAWghC,EAHTC,EAAS57B,EAAM7J,IAAIsJ,GACzB,GAAIm8B,EAAQ,OAAOA,EAInB,QACE,cAAU,KAAe,EACzB,oBAAe,OAAQ,gDAAvB,+GAGA,8BAOF,IALA,IAAMz5B,EAA8B,GAC9Byd,EAA4B,GAC5BmD,EAA8B,GAC9B1O,EAAkC,GAExB,MAAA5U,EAAS2C,YAAT,eAAsB,CAAjC,IAAMy5B,EAAC,KACV,GAAe,uBAAXA,EAAElgC,MAKN,GAAe,wBAAXkgC,EAAElgC,KACJ,OAAQkgC,EAAEloC,WACR,IAAK,QACHisB,EAAQriB,KAAKs+B,GACb,MACF,IAAK,WACH9Y,EAAUxlB,KAAKs+B,GACf,MACF,IAAK,eACHxnB,EAAc9W,KAAKs+B,SAbvB15B,EAAU5E,KAAKs+B,GAmBnB,QACE,aAAW,EAAM,QACdjc,EAAQ9mB,QAAUiqB,EAAUjqB,QAAUub,EAAcvb,OACvD,yHAEA,uBAEF,EACE,QAAQ,EAAS,QAAgB,EAAG,OAAoB,IAEtD,oBAAG,EAAQ,SAAe,OAAO,UAAa,8EAC9C,qBAAqB,SAAgB,qBAAc,qBACnD,qDAGA,yEAA+D,8CACnE,EAAK8mB,EAAQ9mB,OAAW,EAAU,MAAM,WAAE,EAAO,QAAa,WAExD,IAAc,c,IAClB,EAAS,SACP,EACF,EAAE,OACA,EAGF,EAEE,oBAAqB,IAAlBsJ,EAAkB,6DAAgB,kBACrC,qCAGE,yEAAuD,6BAC7D,MAAY,EAAW,GAEvB,EAAIC,EAAmB,qBAAoB,GAO3C,MAAU,MANR,EAAO,MAAsB,SAAX,EAAW,UAC9B,aAEA,OAG2B,KAAC,eAE9B,OADC,MAAO,EAAQ,GAChB,EAIO,IACA,EAAoBsK,EAAc,GACxC,EACgB,EACd,QACE,oBAAG,SAAqB,eAAW,SAAiB,wBAEzD,qF,yGCrHD,SAASmvB,EAAYjgC,EAAejI,GAClC,OAAQA,EAAUA,EAAQiI,GAAM,IAAWkgC,KAG7C,SAASC,EAAOhqB,GACd,MAA0B,oBAAZA,EAAyB,IAAI,EAAWA,GAAWA,EAGnE,SAASiqB,EAAcvoC,GACrB,OAAOA,EAAKoB,QAAQgE,QAAU,EAGhC,kBAEE,WAAYmL,EAAkBvQ,GAA9B,MACE,YAAMuQ,IAAQ,K,OACd,EAAKvQ,KAAOA,E,EAEhB,OANwB,iBAMxB,EANA,CAAwB6Q,OAQxB,aAkFE,WAAYzP,GACNA,IAASC,KAAKD,QAAUA,GA8ChC,OAhIgB,EAAAitB,MAAd,WACE,OAAO,IAAIpT,GAAW,WAAM,oBAGhB,EAAAjX,KAAd,SAAmBwkC,GACjB,OAAqB,IAAjBA,EAAMpjC,OAAqB6V,EAAWoT,QACnCma,EAAM32B,IAAIy2B,GAAQ/0B,QAAO,SAAC40B,EAAGM,GAAM,OAAAN,EAAE33B,OAAF,OAG9B,EAAA6H,MAAd,SACExQ,EACAk6B,EACAC,GAEA,IAAM0G,EAAWJ,EAAOvG,GAClB4G,EAAYL,EAAOtG,GAAS,IAAI/mB,EAAWmtB,IAEjD,OAAIG,EAAcG,IAAaH,EAAcI,GACpC,IAAI1tB,GAAW,SAAAhb,GACpB,OAAO4H,EAAK5H,GACRyoC,EAAStnC,QAAQnB,IAAc,IAAWooC,KAC1CM,EAAUvnC,QAAQnB,IAAc,IAAWooC,QAG1C,IAAIptB,GAAW,SAAChb,EAAWC,GAChC,OAAO2H,EAAK5H,GACRyoC,EAAStnC,QAAQnB,EAAWC,IAAY,IAAWmoC,KACnDM,EAAUvnC,QAAQnB,EAAWC,IAAY,IAAWmoC,SAKhD,EAAAzjC,QAAd,SACE5E,EACAC,GAEA,OACED,EAAKoB,QCxEL,SACJwnC,EACA3oC,GAEA,IAAI0B,EAAU,OAAH,IAAG,CAAH,GAAQinC,GAoBnB,OAVAhnC,OAAO0N,eAAerP,EAAW,aAAc,CAC7CsP,YAAY,EACZxN,MAXiB,SAACvB,GAEhBmB,EADkB,oBAATnB,EACC,OAAH,IAAG,CAAH,eAAQmB,GAAYnB,EAAKmB,IAEtB,OAAH,IAAG,CAAH,eAAQA,GAAYnB,MAU/BoB,OAAO0N,eAAerP,EAAW,aAAc,CAC7CsP,YAAY,EACZxN,MATiB,WAAM,4BAYlB9B,EDiDD4oC,CACE5oC,EAAU0B,QEzEd,SAA6B1B,GACjC,IAAM6oC,EAAuC,CAC3C7hC,UAAWhH,EAAUgH,WAAa,GAClC+S,WAAY/Z,EAAU+Z,YAAc,GACpCf,cAAehZ,EAAUgZ,cACzBvT,MAAOzF,EAAUyF,OAWnB,OAPKojC,EAAqB7vB,gBACxB6vB,EAAqB7vB,cACmB,kBAA/B6vB,EAAqBpjC,MACxB,YAAiBojC,EAAqBpjC,aAAU6Q,EAChD,IAGDuyB,EF0DCC,CG1EJ,SAA4B9oC,GAQhC,IAPA,IAAM+oC,EAAmB,CACvB,QACA,gBACA,YACA,aACA,WAEc,MAAApnC,OAAOqB,KAAKhD,GAAZ,eAAwB,CAAnC,IAAIiD,EAAG,KACV,GAAI8lC,EAAiB/1B,QAAQ/P,GAAO,EAClC,MAAM,QAAI,QAAe,qBAAwB,OAAI,gBAIzD,OAAOjD,EH4DoBgpC,CAAkBhpC,OAEpC,IAAWooC,MAIN,EAAA73B,OAAd,SACE+P,EACA2oB,GAEA,IAAMC,EAAYb,EAAO/nB,GACzB,GAAIgoB,EAAcY,GAOhB,OANA,SAAU,IACJ,SACF,4EAGH,IACMA,EAET,IAAMC,EAAWd,EAAOY,GAExB,OAAIX,EAAca,GACT,IAAInuB,G
ACT,SAAAhb,GACE,OAAAkpC,EAAU/nC,QACRnB,GACA,SAAAkI,GAAM,OAAAihC,EAAShoC,QAAQ+G,IAAO,IAAxB,SACH,IAAWkgC,QAGb,IAAIptB,GAAW,SAAChb,EAAWC,GAChC,OACEipC,EAAU/nC,QAAQnB,GAAW,SAAAkI,GAC3B,OAAOihC,EAAShoC,QAAQ+G,EAAIjI,IAAY,IAAWmoC,SAC/C,IAAWA,SAUlB,YAAAhwB,MAAP,SACExQ,EACAk6B,EACAC,GAEA,OAAO3gC,KAAKmP,OACVyK,EAAW5C,MAAMxQ,EAAMk6B,EAAMC,GAAS,IAAI/mB,EAAWmtB,MAIlD,YAAA53B,OAAP,SAAchQ,GACZ,OAAOya,EAAWzK,OAAOnP,KAAMb,IAG1B,YAAAY,QAAP,SACEnB,EACAC,GAEA,MAAM,QAAI,QAAe,8BAA8B,aAG/C,YAAAiB,QAAV,SACEL,EACAX,GAEA,GAAIA,GAAYA,EAASW,MAQvB,OAPAX,EAASW,MAAMA,IAOR,EAGT,MAAMA,GAGD,YAAAuoC,WAAP,SAAkBnZ,GAEhB,OADA7uB,KAAKF,QAAU+uB,EACR7uB,MAEX,EAjIA,I,gCIpCA,6DAKM,SAAUyiC,EACdwF,GAEA,IAAM3nC,EAAU,qBAAW,eACrBJ,EAAS+nC,GAAY3nC,EAAQJ,OAQnC,OAPA,QACE,cACA,wKAGA,oBAEKA,I,gCCjBT,qLAEagoC,EACQ,oBAAZn9B,SAC4B,gBAAnC,aAAM,WAAM,OAAAyiB,UAAA,WAED2a,EAAmC,oBAAZpF,QAEvBqF,EACO,oBAAXt6B,QACe,oBAAfA,OAAOC,IAEHs6B,EAA4BD,GAAgBt6B,OAAO4H,cAEnD4yB,EAC2C,oBAA/C,aAAM,WAAM,OAAAx7B,OAAOpC,SAAP,iBAEf69B,EASJ,aAAM,WAAM,OAAA/a,UAAUE,UAAU9b,QAAQ,UAA5B,OAA8C,EAQ/C42B,EAAqBF,IAAcC,G,gCClChD,mSAYM,SAAUE,EAAcphC,GAC5B,QACE,YAAU,GAAoB,aAAf,EAAK,KACpB,0JAEA,uCAEF,IAAMqQ,EAAarQ,EAAIgG,YACpBhE,QAAO,SAAAc,GAAK,6BAAAA,EAAEvD,QACd4J,KAAI,SAAAlD,GACH,GAAwB,wBAApBA,EAAW1G,KACb,MAAM,QAAI,QACR,2DACE,OAAU,EAAK,KAEjB,kBAEJ,OAAO0G,KAQX,OALA,QACE,YAAU,EAAW,QACrB,0CAAwC,OAAU,EAAO,uBACzD,4BAEKjG,EAGH,SAAUqhC,EACdrhC,GAGA,OADAohC,EAAcphC,GACPA,EAAIgG,YAAYhE,QACrB,SAAAiE,GAAc,8BAAAA,EAAW1G,QACzB,GAGE,SAAU+hC,EAAiBthC,GAC/B,OACEA,EAAIgG,YACDhE,QACC,SAAAiE,GACE,MAAoB,wBAApBA,EAAW1G,MAAkC0G,EAAW9K,QAE3DgO,KAAI,SAACs2B,GAA+B,OAAAA,EAAGtkC,KAAH,SAAgB,IAAM,KAK3D,SAAUomC,EACdvhC,GAEA,OAAOA,EAAIgG,YAAYhE,QACrB,SAAAiE,GAAc,6BAAAA,EAAW1G,QAIvB,SAAUiiC,EAAmBxhC,GACjC,IAAMma,EAAWknB,EAAuBrhC,GAOxC,OALA,QACE,YAAQ,GACR,UADqB,EAAS,UAC9B,oCACA,yCAEKma,EAGH,SAAUsnB,EACdzhC,GAEA,QACE,YAAuB,aAAf,EAAK,KACb,0JAEA,oCAEF,QACE,YAAI,EAAAgG,YAAsB,QAC1B,gDACA,wCAEF,IAAM07B,EAAc1hC,EAAIgG,YAAY,GAOpC,OALA,QACE,YAAqB,uBAArB07B,EAAqB,KACrB,kCACA,8CAEKA,EAQH,SAAUC,EACdC,GAIA,IAAIC,EAFJT,EAAcQ,GAId,IAAuB,UAAAA,EAAS57B,YAAT,eAAsB,CAAxC,IAAIC,EAAU,KACjB,GAAwB,wBAApBA,EAAW1G,KAAgC,CAC7C,IAAMhI,EAAa0O,EAAuC1O,UAC1D,GACgB,UAAdA,GACc,aAAdA,GACc,iBAAdA,EAEA,OAAO0O,EAGa,uBAApBA,EAAW1G,MAAkCsiC,IAG/CA,EAAqB57B,GAIzB,GAAI47B,EACF,OAAOA,EAGT,MAAM,QAAI,QACR,wFACA,YAGE,SAAUC,EACd77B,GAEA,IAAM87B,EAAgB7oC,OAAOmB,OAAO,MAC9B2nC,EAAO/7B,GAAcA,EAAWlE,oBAYtC,OAXIigC,GAAQA,EAAKtlC,QACfslC,EAAK9nC,SAAQ,SAAAqI,GACPA,EAAI0/B,cACN,YACEF,EACAx/B,EAAIL,SAAS/G,KACboH,EAAI0/B,iBAKLF,I,iCC/JT,0HAqBM,SAAUG,EACd,EACA3jC,G,IADE2B,EAAU,aAGZ,OAAKA,IAAeA,EAAWxD,QA+E3B,SACJwD,GAEA,IAAMnI,EAA8B,GAEhCmI,GAAcA,EAAWxD,QAC3BwD,EAAWhG,SAAQ,SAAAiG,GACjB,GAXN,SAA8B,G,IAAU9G,EAAK,aAC3C,MAAiB,SAAVA,GAA8B,YAAVA,EAUlB8oC,CAAqBhiC,GAA1B,CAEA,IAAMiiC,EAAqBjiC,EAAUrC,UAC/BukC,EAAgBliC,EAAUhF,KAAK9B,MAErC,QACE,YAAA+oC,GACA,IADsBA,EAAyB,OAC/C,0CAA0C,OAAAC,EAAa,gBACvD,gCAEF,IAAMC,EAAaF,EAAoB,GACvC,QACE,YAAU,EAAS,MACnB,OAD6B,EAAW,KAAK,MAC7C,6BAA6B,OAAAC,EAAa,gBAC1C,4CAEF,IAAME,EAAqBD,EAAWjpC,MAGtC,QACE,YAAO,IACa,aAAjBkpC,EAAQhjC,MAAwC,iBAAjBgjC,EAAQhjC,MAC1C,4BAAqB8iC,EAAa,sDAClC,gBAEY,aAAd,EAAY,MAAuB,iBAAV,QAAc,IACtC,uCAIN,SAjHQG,CACLtiC,GACAL,OAAM,SAAC,G,IAAEM,EAAS,YAAEmiC,EAAU,aAC1BG,GAAuB,EAU3B,MAT8B,aAA1BH,EAAWjpC,MAAMkG,MACnBkjC,EAAclkC,GAAaA,EAAW+jC,EAAWjpC,MAAuB8B,KAAK9B,OAC7E,QACE,iBACA,IADAopC,EACA,mCAAmC,OAAU,EAAU,2BACvD,4BAEFA,EAAeH,EAAWjpC,MAA2BA,MAEvB,SAAzB8G,EAAUhF,KAAK9B,OAAoBopC,EAAcA,KA0BtD,SAAUC,EACdC,EACAtW,EACAtM,GAEA,IAAM6iB,EAAU,IAAIjoC,IAAIgoC,GAClBE,EAAcD,EAAQ3vB,KAe5B,OAbA,YAAMoZ,EAAM,CACV9qB,UAAS,SAACT,GACR,GACE8hC,EAAQ39B
,OAAOnE,EAAK3F,KAAK9B,UACvB0mB,IAAQ6iB,EAAQ3vB,MAElB,OAAO,OAON8M,GAAO6iB,EAAQ3vB,KAAO2vB,EAAQ3vB,KAAO4vB,EAGxC,SAAUC,EAAiBz/B,GAC/B,OAAOA,GAAYq/B,EAAc,CAAC,SAAU,UAAWr/B,GAAU,K,0JC1F3DoE,EAAmBvO,OAAOK,UAAZ,eAwBhB,SAAUwkB,I,IACd,sDAEA,OAAOglB,EAAexsB,GASlB,SAAUwsB,EAAkBxsB,GAChC,IAAInX,EAASmX,EAAQ,IAAO,GACtB1J,EAAQ0J,EAAQ7Z,OACtB,GAAImQ,EAAQ,EAEV,IADA,IAAMklB,EAAS,IAAIiR,EACV5oC,EAAI,EAAGA,EAAIyS,IAASzS,EAC3BgF,EAAS2yB,EAAOpP,MAAMvjB,EAAQmX,EAAQnc,IAG1C,OAAOgF,EAWT,IAAM6jC,EACJ,SAAU7jC,EAAQwP,EAAQkf,GACxB,OAAOn1B,KAAKgqB,MAAMvjB,EAAO0uB,GAAWlf,EAAOkf,KAG/C,aACE,WACUoV,QAAA,IAAAA,MAAA,QAAAA,aAgCH,KAAAC,SAAW,IAEV,KAAAC,WAAa,IAAIzoC,IAkB3B,OAjDS,YAAAgoB,MAAP,SAAavjB,EAAawP,G,IAA1B,WAAuC,oDACrC,OAAI,YAAgBA,IAAW,YAAgBxP,IAC7ClG,OAAOqB,KAAKqU,GAAQ1U,SAAQ,SAAAmpC,GAC1B,GAAI57B,EAAe7N,KAAKwF,EAAQikC,GAAY,CAC1C,IAAMC,EAAclkC,EAAOikC,GAC3B,GAAIz0B,EAAOy0B,KAAeC,EAAa,CACrC,IAAMvrC,EAAS,EAAKmrC,WAAU,MAAf,EAAI,aAAY9jC,EAAQwP,EAAQy0B,GAAcpqC,GAAO,IAGhElB,IAAWurC,KACblkC,EAAS,EAAKmkC,oBAAoBnkC,IAC3BikC,GAAatrC,SAMxBqH,EAAS,EAAKmkC,oBAAoBnkC,IAC3BikC,GAAaz0B,EAAOy0B,MAIxBjkC,GAIFwP,GAOF,YAAA20B,oBAAP,SAA8BlqC,GAc5B,OAbI,YAAgBA,KACbV,KAAKyqC,WAAWtpC,IAAIT,KAErBA,EADEsL,MAAMY,QAAQlM,GACPA,EAAcW,MAAM,GAErB,OAAH,IAAG,CAAH,CACHiO,UAAW/O,OAAOoB,eAAejB,IAC9BA,GAGPV,KAAKyqC,WAAWhoC,IAAI/B,KAGjBA,GAEX,EAtDA","file":"static/js/apollo.f25b12f5.chunk.js","sourcesContent":["import { ExecutionResult } from 'graphql';\n\nimport { NetworkError, GraphQLErrors } from '../../errors';\nimport { Observable } from '../../utilities';\nimport { ApolloLink, Operation, FetchResult, NextLink } from '../core';\n\nexport interface ErrorResponse {\n graphQLErrors?: GraphQLErrors;\n networkError?: NetworkError;\n response?: ExecutionResult;\n operation: Operation;\n forward: NextLink;\n}\n\nexport namespace ErrorLink {\n /**\n * Callback to be triggered when an error occurs within the link stack.\n */\n export interface ErrorHandler {\n (error: ErrorResponse): Observable | void;\n }\n}\n\n// For backwards compatibility.\nexport import ErrorHandler = ErrorLink.ErrorHandler;\n\nexport function onError(errorHandler: ErrorHandler): ApolloLink {\n return new ApolloLink((operation, forward) => {\n return new Observable(observer => {\n let sub: any;\n let retriedSub: any;\n let retriedResult: any;\n\n try {\n sub = forward(operation).subscribe({\n next: result => {\n if (result.errors) {\n retriedResult = errorHandler({\n graphQLErrors: result.errors,\n response: result,\n operation,\n forward,\n });\n\n if (retriedResult) {\n retriedSub = retriedResult.subscribe({\n next: observer.next.bind(observer),\n error: observer.error.bind(observer),\n complete: observer.complete.bind(observer),\n });\n return;\n }\n }\n observer.next(result);\n },\n error: networkError => {\n retriedResult = errorHandler({\n operation,\n networkError,\n //Network errors can return GraphQL errors on for example a 403\n graphQLErrors:\n networkError &&\n networkError.result &&\n networkError.result.errors,\n forward,\n });\n if (retriedResult) {\n retriedSub = retriedResult.subscribe({\n next: observer.next.bind(observer),\n error: observer.error.bind(observer),\n complete: observer.complete.bind(observer),\n });\n return;\n }\n observer.error(networkError);\n },\n complete: () => {\n // disable the previous sub from calling complete on observable\n // if retry is in flight.\n if (!retriedResult) {\n observer.complete.bind(observer)();\n }\n },\n });\n } catch (e) {\n errorHandler({ networkError: e, operation, forward });\n observer.error(e);\n }\n\n return () => {\n if 
(sub) sub.unsubscribe();\n if (retriedSub) sub.unsubscribe();\n };\n });\n });\n}\n\nexport class ErrorLink extends ApolloLink {\n private link: ApolloLink;\n constructor(errorHandler: ErrorLink.ErrorHandler) {\n super();\n this.link = onError(errorHandler);\n }\n\n public request(\n operation: Operation,\n forward: NextLink,\n ): Observable | null {\n return this.link.request(operation, forward);\n }\n}\n","import { invariant } from '../../utilities/globals';\n\nimport * as React from 'react';\n\nimport { ApolloClient } from '../../core';\nimport { getApolloContext } from './ApolloContext';\n\nexport interface ApolloProviderProps {\n client: ApolloClient;\n children: React.ReactNode | React.ReactNode[] | null;\n}\n\nexport const ApolloProvider: React.FC> = ({\n client,\n children\n}) => {\n const ApolloContext = getApolloContext();\n return (\n \n {(context: any = {}) => {\n if (client && context.client !== client) {\n context = Object.assign({}, context, { client });\n }\n\n invariant(\n context.client,\n 'ApolloProvider was not passed a client instance. Make ' +\n 'sure you pass in your client via the \"client\" prop.'\n );\n\n return (\n \n {children}\n \n );\n }}\n \n );\n};\n","const { toString } = Object.prototype;\n\n/**\n * Deeply clones a value to create a new instance.\n */\nexport function cloneDeep(value: T): T {\n return cloneDeepHelper(value);\n}\n\nfunction cloneDeepHelper(val: T, seen?: Map): T {\n switch (toString.call(val)) {\n case \"[object Array]\": {\n seen = seen || new Map;\n if (seen.has(val)) return seen.get(val);\n const copy: T & any[] = (val as any).slice(0);\n seen.set(val, copy);\n copy.forEach(function (child, i) {\n copy[i] = cloneDeepHelper(child, seen);\n });\n return copy;\n }\n\n case \"[object Object]\": {\n seen = seen || new Map;\n if (seen.has(val)) return seen.get(val);\n // High fidelity polyfills of Object.create and Object.getPrototypeOf are\n // possible in all JS environments, so we will assume they exist/work.\n const copy = Object.create(Object.getPrototypeOf(val));\n seen.set(val, copy);\n Object.keys(val).forEach(key => {\n copy[key] = cloneDeepHelper((val as any)[key], seen);\n });\n return copy;\n }\n\n default:\n return val;\n }\n}\n","import '../globals'; // For __DEV__\nimport { isNonNullObject } from './objects';\n\nfunction deepFreeze(value: any) {\n const workSet = new Set([value]);\n workSet.forEach(obj => {\n if (isNonNullObject(obj) && shallowFreeze(obj) === obj) {\n Object.getOwnPropertyNames(obj).forEach(name => {\n if (isNonNullObject(obj[name])) workSet.add(obj[name]);\n });\n }\n });\n return value;\n}\n\nfunction shallowFreeze(obj: T): T | null {\n if (__DEV__ && !Object.isFrozen(obj)) {\n try {\n Object.freeze(obj);\n } catch (e) {\n // Some types like Uint8Array and Node.js's Buffer cannot be frozen, but\n // they all throw a TypeError when you try, so we re-throw any exceptions\n // that are not TypeErrors, since that would be unexpected.\n if (e instanceof TypeError) return null;\n throw e;\n }\n }\n return obj;\n}\n\nexport function maybeDeepFreeze(obj: T): T {\n if (__DEV__) {\n deepFreeze(obj);\n }\n return obj;\n}\n","import { ApolloLink } from './ApolloLink';\n\nexport const from = ApolloLink.from;\n","import { useCallback, useEffect, useRef, useState } from 'react';\nimport { DocumentNode } from 'graphql';\nimport { TypedDocumentNode } from '@graphql-typed-document-node/core';\nimport {\n MutationFunctionOptions,\n MutationHookOptions,\n MutationResult,\n MutationTuple,\n} from '../types/types';\n\nimport {\n 
ApolloCache,\n DefaultContext,\n mergeOptions,\n OperationVariables,\n} from '../../core';\nimport { equal } from '@wry/equality';\nimport { DocumentType, verifyDocumentType } from '../parser';\nimport { ApolloError } from '../../errors';\nimport { useApolloClient } from './useApolloClient';\n\nexport function useMutation<\n TData = any,\n TVariables = OperationVariables,\n TContext = DefaultContext,\n TCache extends ApolloCache = ApolloCache,\n>(\n mutation: DocumentNode | TypedDocumentNode,\n options?: MutationHookOptions,\n): MutationTuple {\n const client = useApolloClient(options?.client);\n verifyDocumentType(mutation, DocumentType.Mutation);\n const [result, setResult] = useState>({\n called: false,\n loading: false,\n client,\n });\n\n const ref = useRef({\n result,\n mutationId: 0,\n isMounted: true,\n client,\n mutation,\n options,\n });\n\n // TODO: Trying to assign these in a useEffect or useLayoutEffect breaks\n // higher-order components.\n {\n Object.assign(ref.current, { client, options, mutation });\n }\n\n const execute = useCallback((\n executeOptions: MutationFunctionOptions<\n TData,\n TVariables,\n TContext,\n TCache\n > = {}\n ) => {\n const {client, options, mutation} = ref.current;\n const baseOptions = { ...options, mutation };\n if (!ref.current.result.loading && !baseOptions.ignoreResults && ref.current.isMounted) {\n setResult(ref.current.result = {\n loading: true,\n error: void 0,\n data: void 0,\n called: true,\n client,\n });\n }\n\n const mutationId = ++ref.current.mutationId;\n const clientOptions = mergeOptions(\n baseOptions,\n executeOptions as any,\n );\n\n return client.mutate(clientOptions).then((response) => {\n const { data, errors } = response;\n const error =\n errors && errors.length > 0\n ? new ApolloError({ graphQLErrors: errors })\n : void 0;\n\n if (\n mutationId === ref.current.mutationId &&\n !clientOptions.ignoreResults\n ) {\n const result = {\n called: true,\n loading: false,\n data,\n error,\n client,\n };\n\n if (ref.current.isMounted && !equal(ref.current.result, result)) {\n setResult(ref.current.result = result);\n }\n }\n ref.current.options?.onCompleted?.(response.data!, clientOptions);\n executeOptions.onCompleted?.(response.data!, clientOptions);\n return response;\n }).catch((error) => {\n if (\n mutationId === ref.current.mutationId &&\n ref.current.isMounted\n ) {\n const result = {\n loading: false,\n error,\n data: void 0,\n called: true,\n client,\n };\n\n if (!equal(ref.current.result, result)) {\n setResult(ref.current.result = result);\n }\n }\n\n if (ref.current.options?.onError || clientOptions.onError) {\n ref.current.options?.onError?.(error, clientOptions);\n executeOptions.onError?.(error, clientOptions);\n // TODO(brian): why are we returning this here???\n return { data: void 0, errors: error };\n }\n\n throw error;\n });\n }, []);\n\n const reset = useCallback(() => {\n if (ref.current.isMounted) {\n setResult({ called: false, loading: false, client });\n }\n }, []);\n\n useEffect(() => {\n ref.current.isMounted = true;\n\n return () => {\n ref.current.isMounted = false;\n };\n }, []);\n\n return [execute, { reset, ...result }];\n}\n","import { DocumentNode } from 'graphql';\nimport { TypedDocumentNode } from '@graphql-typed-document-node/core';\nimport { useCallback, useMemo, useRef } from 'react';\n\nimport { OperationVariables } from '../../core';\nimport { mergeOptions } from '../../utilities';\nimport {\n LazyQueryHookOptions,\n LazyQueryResultTuple,\n QueryResult,\n} from '../types/types';\nimport { 
useInternalState } from './useQuery';\nimport { useApolloClient } from './useApolloClient';\n\n// The following methods, when called will execute the query, regardless of\n// whether the useLazyQuery execute function was called before.\nconst EAGER_METHODS = [\n 'refetch',\n 'reobserve',\n 'fetchMore',\n 'updateQuery',\n 'startPolling',\n 'subscribeToMore',\n] as const;\n\nexport function useLazyQuery(\n query: DocumentNode | TypedDocumentNode,\n options?: LazyQueryHookOptions\n): LazyQueryResultTuple {\n const internalState = useInternalState(\n useApolloClient(options && options.client),\n query,\n );\n\n const execOptionsRef = useRef>>();\n const merged = execOptionsRef.current\n ? mergeOptions(options, execOptionsRef.current)\n : options;\n\n const useQueryResult = internalState.useQuery({\n ...merged,\n skip: !execOptionsRef.current,\n });\n\n const initialFetchPolicy =\n useQueryResult.observable.options.initialFetchPolicy ||\n internalState.getDefaultFetchPolicy();\n\n const result: QueryResult =\n Object.assign(useQueryResult, {\n called: !!execOptionsRef.current,\n });\n\n // We use useMemo here to make sure the eager methods have a stable identity.\n const eagerMethods = useMemo(() => {\n const eagerMethods: Record = {};\n for (const key of EAGER_METHODS) {\n const method = result[key];\n eagerMethods[key] = function () {\n if (!execOptionsRef.current) {\n execOptionsRef.current = Object.create(null);\n // Only the first time populating execOptionsRef.current matters here.\n internalState.forceUpdate();\n }\n return method.apply(this, arguments);\n };\n }\n\n return eagerMethods;\n }, []);\n\n Object.assign(result, eagerMethods);\n\n const execute = useCallback<\n LazyQueryResultTuple[0]\n >(executeOptions => {\n execOptionsRef.current = executeOptions ? {\n ...executeOptions,\n fetchPolicy: executeOptions.fetchPolicy || initialFetchPolicy,\n } : {\n fetchPolicy: initialFetchPolicy,\n };\n\n const promise = internalState\n .asyncUpdate() // Like internalState.forceUpdate, but returns a Promise.\n .then(queryResult => Object.assign(queryResult, eagerMethods));\n\n // Because the return value of `useLazyQuery` is usually floated, we need\n // to catch the promise to prevent unhandled rejections.\n promise.catch(() => {});\n\n return promise;\n }, []);\n\n return [execute, result];\n}\n","import '../../utilities/globals';\nimport { useState, useRef, useEffect } from 'react';\nimport { DocumentNode } from 'graphql';\nimport { TypedDocumentNode } from '@graphql-typed-document-node/core';\nimport { invariant } from '../../utilities/globals'\nimport { equal } from '@wry/equality';\n\nimport { DocumentType, verifyDocumentType } from '../parser';\nimport {\n SubscriptionHookOptions,\n SubscriptionResult\n} from '../types/types';\nimport { OperationVariables } from '../../core';\nimport { useApolloClient } from './useApolloClient';\n\nexport function useSubscription(\n subscription: DocumentNode | TypedDocumentNode,\n options?: SubscriptionHookOptions,\n) {\n const hasIssuedDeprecationWarningRef = useRef(false);\n const client = useApolloClient(options?.client);\n verifyDocumentType(subscription, DocumentType.Subscription);\n const [result, setResult] = useState>({\n loading: !options?.skip,\n error: void 0,\n data: void 0,\n variables: options?.variables,\n });\n\n if (!hasIssuedDeprecationWarningRef.current) {\n hasIssuedDeprecationWarningRef.current = true;\n\n if (options?.onSubscriptionData) {\n invariant.warn(\n options.onData\n ? 
\"'useSubscription' supports only the 'onSubscriptionData' or 'onData' option, but not both. Only the 'onData' option will be used.\"\n : \"'onSubscriptionData' is deprecated and will be removed in a future major version. Please use the 'onData' option instead.\"\n );\n }\n\n if (options?.onSubscriptionComplete) {\n invariant.warn(\n options.onComplete\n ? \"'useSubscription' supports only the 'onSubscriptionComplete' or 'onComplete' option, but not both. Only the 'onComplete' option will be used.\"\n : \"'onSubscriptionComplete' is deprecated and will be removed in a future major version. Please use the 'onComplete' option instead.\"\n );\n }\n }\n\n const [observable, setObservable] = useState(() => {\n if (options?.skip) {\n return null;\n }\n\n return client.subscribe({\n query: subscription,\n variables: options?.variables,\n fetchPolicy: options?.fetchPolicy,\n context: options?.context,\n });\n });\n\n const canResetObservableRef = useRef(false);\n useEffect(() => {\n return () => {\n canResetObservableRef.current = true;\n };\n }, []);\n\n const ref = useRef({ client, subscription, options });\n useEffect(() => {\n let shouldResubscribe = options?.shouldResubscribe;\n if (typeof shouldResubscribe === 'function') {\n shouldResubscribe = !!shouldResubscribe(options!);\n }\n\n if (options?.skip) {\n if (!options?.skip !== !ref.current.options?.skip || canResetObservableRef.current) {\n setResult({\n loading: false,\n data: void 0,\n error: void 0,\n variables: options?.variables,\n });\n setObservable(null);\n canResetObservableRef.current = false;\n }\n } else if (\n (shouldResubscribe !== false &&\n (client !== ref.current.client ||\n subscription !== ref.current.subscription ||\n options?.fetchPolicy !== ref.current.options?.fetchPolicy ||\n !options?.skip !== !ref.current.options?.skip ||\n !equal(options?.variables, ref.current.options?.variables))) ||\n canResetObservableRef.current\n ) {\n setResult({\n loading: true,\n data: void 0,\n error: void 0,\n variables: options?.variables,\n });\n setObservable(client.subscribe({\n query: subscription,\n variables: options?.variables,\n fetchPolicy: options?.fetchPolicy,\n context: options?.context,\n }));\n canResetObservableRef.current = false;\n }\n\n Object.assign(ref.current, { client, subscription, options });\n }, [client, subscription, options, canResetObservableRef.current]);\n\n useEffect(() => {\n if (!observable) {\n return;\n }\n\n const subscription = observable.subscribe({\n next(fetchResult) {\n const result = {\n loading: false,\n // TODO: fetchResult.data can be null but SubscriptionResult.data\n // expects TData | undefined only\n data: fetchResult.data!,\n error: void 0,\n variables: options?.variables,\n };\n setResult(result);\n\n if (ref.current.options?.onData) {\n ref.current.options.onData({\n client,\n data: result\n });\n } else if (ref.current.options?.onSubscriptionData) {\n ref.current.options.onSubscriptionData({\n client,\n subscriptionData: result\n });\n }\n },\n error(error) {\n setResult({\n loading: false,\n data: void 0,\n error,\n variables: options?.variables,\n });\n ref.current.options?.onError?.(error);\n },\n complete() {\n if (ref.current.options?.onComplete) {\n ref.current.options.onComplete();\n } else if (ref.current.options?.onSubscriptionComplete) {\n ref.current.options.onSubscriptionComplete();\n }\n },\n });\n\n return () => {\n subscription.unsubscribe();\n };\n }, [observable]);\n\n return result;\n}\n","import { __assign } from \"tslib\";\nimport { parse } from 'graphql';\nvar 
docCache = new Map();\nvar fragmentSourceMap = new Map();\nvar printFragmentWarnings = true;\nvar experimentalFragmentVariables = false;\nfunction normalize(string) {\n return string.replace(/[\\s,]+/g, ' ').trim();\n}\nfunction cacheKeyFromLoc(loc) {\n return normalize(loc.source.body.substring(loc.start, loc.end));\n}\nfunction processFragments(ast) {\n var seenKeys = new Set();\n var definitions = [];\n ast.definitions.forEach(function (fragmentDefinition) {\n if (fragmentDefinition.kind === 'FragmentDefinition') {\n var fragmentName = fragmentDefinition.name.value;\n var sourceKey = cacheKeyFromLoc(fragmentDefinition.loc);\n var sourceKeySet = fragmentSourceMap.get(fragmentName);\n if (sourceKeySet && !sourceKeySet.has(sourceKey)) {\n if (printFragmentWarnings) {\n console.warn(\"Warning: fragment with name \" + fragmentName + \" already exists.\\n\"\n + \"graphql-tag enforces all fragment names across your application to be unique; read more about\\n\"\n + \"this in the docs: http://dev.apollodata.com/core/fragments.html#unique-names\");\n }\n }\n else if (!sourceKeySet) {\n fragmentSourceMap.set(fragmentName, sourceKeySet = new Set);\n }\n sourceKeySet.add(sourceKey);\n if (!seenKeys.has(sourceKey)) {\n seenKeys.add(sourceKey);\n definitions.push(fragmentDefinition);\n }\n }\n else {\n definitions.push(fragmentDefinition);\n }\n });\n return __assign(__assign({}, ast), { definitions: definitions });\n}\nfunction stripLoc(doc) {\n var workSet = new Set(doc.definitions);\n workSet.forEach(function (node) {\n if (node.loc)\n delete node.loc;\n Object.keys(node).forEach(function (key) {\n var value = node[key];\n if (value && typeof value === 'object') {\n workSet.add(value);\n }\n });\n });\n var loc = doc.loc;\n if (loc) {\n delete loc.startToken;\n delete loc.endToken;\n }\n return doc;\n}\nfunction parseDocument(source) {\n var cacheKey = normalize(source);\n if (!docCache.has(cacheKey)) {\n var parsed = parse(source, {\n experimentalFragmentVariables: experimentalFragmentVariables,\n allowLegacyFragmentVariables: experimentalFragmentVariables\n });\n if (!parsed || parsed.kind !== 'Document') {\n throw new Error('Not a valid GraphQL document.');\n }\n docCache.set(cacheKey, stripLoc(processFragments(parsed)));\n }\n return docCache.get(cacheKey);\n}\nexport function gql(literals) {\n var args = [];\n for (var _i = 1; _i < arguments.length; _i++) {\n args[_i - 1] = arguments[_i];\n }\n if (typeof literals === 'string') {\n literals = [literals];\n }\n var result = literals[0];\n args.forEach(function (arg, i) {\n if (arg && arg.kind === 'Document') {\n result += arg.loc.source.body;\n }\n else {\n result += arg;\n }\n result += literals[i + 1];\n });\n return parseDocument(result);\n}\nexport function resetCaches() {\n docCache.clear();\n fragmentSourceMap.clear();\n}\nexport function disableFragmentWarnings() {\n printFragmentWarnings = false;\n}\nexport function enableExperimentalFragmentVariables() {\n experimentalFragmentVariables = true;\n}\nexport function disableExperimentalFragmentVariables() {\n experimentalFragmentVariables = false;\n}\nvar extras = {\n gql: gql,\n resetCaches: resetCaches,\n disableFragmentWarnings: disableFragmentWarnings,\n enableExperimentalFragmentVariables: enableExperimentalFragmentVariables,\n disableExperimentalFragmentVariables: disableExperimentalFragmentVariables\n};\n(function (gql_1) {\n gql_1.gql = extras.gql, gql_1.resetCaches = extras.resetCaches, gql_1.disableFragmentWarnings = extras.disableFragmentWarnings, 
gql_1.enableExperimentalFragmentVariables = extras.enableExperimentalFragmentVariables, gql_1.disableExperimentalFragmentVariables = extras.disableExperimentalFragmentVariables;\n})(gql || (gql = {}));\ngql[\"default\"] = gql;\nexport default gql;\n//# sourceMappingURL=index.js.map","export function filterInPlace(\n array: T[],\n test: (elem: T) => boolean,\n context?: any,\n): T[] {\n let target = 0;\n array.forEach(function (elem, i) {\n if (test.call(this, elem, i, array)) {\n array[target++] = elem;\n }\n }, context);\n array.length = target;\n return array;\n}\n","import { invariant } from '../globals';\n\nimport {\n DocumentNode,\n SelectionNode,\n SelectionSetNode,\n OperationDefinitionNode,\n FieldNode,\n DirectiveNode,\n FragmentDefinitionNode,\n ArgumentNode,\n FragmentSpreadNode,\n VariableDefinitionNode,\n VariableNode,\n visit,\n ASTNode,\n} from 'graphql';\n\n// TODO(brian): A hack until this issue is resolved (https://github.com/graphql/graphql-js/issues/3356)\ntype Kind = any;\n\nimport {\n checkDocument,\n getOperationDefinition,\n getFragmentDefinition,\n getFragmentDefinitions,\n getMainDefinition,\n} from './getFromAST';\nimport { filterInPlace } from '../common/filterInPlace';\nimport { isField, isInlineFragment } from './storeUtils';\nimport {\n createFragmentMap,\n FragmentMap,\n} from './fragments';\n\nexport type RemoveNodeConfig = {\n name?: string;\n test?: (node: N) => boolean;\n remove?: boolean;\n};\n\nexport type GetNodeConfig = {\n name?: string;\n test?: (node: N) => boolean;\n};\n\nexport type RemoveDirectiveConfig = RemoveNodeConfig;\nexport type GetDirectiveConfig = GetNodeConfig;\nexport type RemoveArgumentsConfig = RemoveNodeConfig;\nexport type GetFragmentSpreadConfig = GetNodeConfig;\nexport type RemoveFragmentSpreadConfig = RemoveNodeConfig;\nexport type RemoveFragmentDefinitionConfig = RemoveNodeConfig<\n FragmentDefinitionNode\n>;\nexport type RemoveVariableDefinitionConfig = RemoveNodeConfig<\n VariableDefinitionNode\n>;\n\nconst TYPENAME_FIELD: FieldNode = {\n kind: 'Field' as Kind,\n name: {\n kind: 'Name' as Kind,\n value: '__typename',\n },\n};\n\nfunction isEmpty(\n op: OperationDefinitionNode | FragmentDefinitionNode,\n fragmentMap: FragmentMap,\n): boolean {\n return !op || op.selectionSet.selections.every(\n selection => selection.kind === 'FragmentSpread' &&\n isEmpty(fragmentMap[selection.name.value], fragmentMap)\n );\n}\n\nfunction nullIfDocIsEmpty(doc: DocumentNode) {\n return isEmpty(\n getOperationDefinition(doc) || getFragmentDefinition(doc),\n createFragmentMap(getFragmentDefinitions(doc)),\n )\n ? 
null\n : doc;\n}\n\nfunction getDirectiveMatcher(\n directives: (RemoveDirectiveConfig | GetDirectiveConfig)[],\n) {\n return function directiveMatcher(directive: DirectiveNode) {\n return directives.some(\n dir =>\n (dir.name && dir.name === directive.name.value) ||\n (dir.test && dir.test(directive)),\n );\n };\n}\n\nexport function removeDirectivesFromDocument(\n directives: RemoveDirectiveConfig[],\n doc: DocumentNode,\n): DocumentNode | null {\n const variablesInUse: Record = Object.create(null);\n let variablesToRemove: RemoveArgumentsConfig[] = [];\n\n const fragmentSpreadsInUse: Record = Object.create(null);\n let fragmentSpreadsToRemove: RemoveFragmentSpreadConfig[] = [];\n\n let modifiedDoc = nullIfDocIsEmpty(\n visit(doc, {\n Variable: {\n enter(node, _key, parent) {\n // Store each variable that's referenced as part of an argument\n // (excluding operation definition variables), so we know which\n // variables are being used. If we later want to remove a variable\n // we'll first check to see if it's being used, before continuing with\n // the removal.\n if (\n (parent as VariableDefinitionNode).kind !== 'VariableDefinition'\n ) {\n variablesInUse[node.name.value] = true;\n }\n },\n },\n\n Field: {\n enter(node) {\n if (directives && node.directives) {\n // If `remove` is set to true for a directive, and a directive match\n // is found for a field, remove the field as well.\n const shouldRemoveField = directives.some(\n directive => directive.remove,\n );\n\n if (\n shouldRemoveField &&\n node.directives &&\n node.directives.some(getDirectiveMatcher(directives))\n ) {\n if (node.arguments) {\n // Store field argument variables so they can be removed\n // from the operation definition.\n node.arguments.forEach(arg => {\n if (arg.value.kind === 'Variable') {\n variablesToRemove.push({\n name: (arg.value as VariableNode).name.value,\n });\n }\n });\n }\n\n if (node.selectionSet) {\n // Store fragment spread names so they can be removed from the\n // document.\n getAllFragmentSpreadsFromSelectionSet(node.selectionSet).forEach(\n frag => {\n fragmentSpreadsToRemove.push({\n name: frag.name.value,\n });\n },\n );\n }\n\n // Remove the field.\n return null;\n }\n }\n },\n },\n\n FragmentSpread: {\n enter(node) {\n // Keep track of referenced fragment spreads. 
This is used to\n // determine if top level fragment definitions should be removed.\n fragmentSpreadsInUse[node.name.value] = true;\n },\n },\n\n Directive: {\n enter(node) {\n // If a matching directive is found, remove it.\n if (getDirectiveMatcher(directives)(node)) {\n return null;\n }\n },\n },\n }),\n );\n\n // If we've removed fields with arguments, make sure the associated\n // variables are also removed from the rest of the document, as long as they\n // aren't being used elsewhere.\n if (\n modifiedDoc &&\n filterInPlace(variablesToRemove, v => !!v.name && !variablesInUse[v.name]).length\n ) {\n modifiedDoc = removeArgumentsFromDocument(variablesToRemove, modifiedDoc);\n }\n\n // If we've removed selection sets with fragment spreads, make sure the\n // associated fragment definitions are also removed from the rest of the\n // document, as long as they aren't being used elsewhere.\n if (\n modifiedDoc &&\n filterInPlace(fragmentSpreadsToRemove, fs => !!fs.name && !fragmentSpreadsInUse[fs.name])\n .length\n ) {\n modifiedDoc = removeFragmentSpreadFromDocument(\n fragmentSpreadsToRemove,\n modifiedDoc,\n );\n }\n\n return modifiedDoc;\n}\n\nexport const addTypenameToDocument = Object.assign(function <\n TNode extends ASTNode\n>(\n doc: TNode\n): TNode {\n return visit(doc, {\n SelectionSet: {\n enter(node, _key, parent) {\n // Don't add __typename to OperationDefinitions.\n if (\n parent &&\n (parent as OperationDefinitionNode).kind === 'OperationDefinition'\n ) {\n return;\n }\n\n // No changes if no selections.\n const { selections } = node;\n if (!selections) {\n return;\n }\n\n // If selections already have a __typename, or are part of an\n // introspection query, do nothing.\n const skip = selections.some(selection => {\n return (\n isField(selection) &&\n (selection.name.value === '__typename' ||\n selection.name.value.lastIndexOf('__', 0) === 0)\n );\n });\n if (skip) {\n return;\n }\n\n // If this SelectionSet is @export-ed as an input variable, it should\n // not have a __typename field (see issue #4691).\n const field = parent as FieldNode;\n if (\n isField(field) &&\n field.directives &&\n field.directives.some(d => d.name.value === 'export')\n ) {\n return;\n }\n\n // Create and return a new SelectionSet with a __typename Field.\n return {\n ...node,\n selections: [...selections, TYPENAME_FIELD],\n };\n },\n },\n });\n}, {\n added(field: FieldNode): boolean {\n return field === TYPENAME_FIELD;\n },\n});\n\nconst connectionRemoveConfig = {\n test: (directive: DirectiveNode) => {\n const willRemove = directive.name.value === 'connection';\n if (willRemove) {\n if (\n !directive.arguments ||\n !directive.arguments.some(arg => arg.name.value === 'key')\n ) {\n invariant.warn(\n 'Removing an @connection directive even though it does not have a key. 
' +\n 'You may want to use the key parameter to specify a store key.',\n );\n }\n }\n\n return willRemove;\n },\n};\n\nexport function removeConnectionDirectiveFromDocument(doc: DocumentNode) {\n return removeDirectivesFromDocument(\n [connectionRemoveConfig],\n checkDocument(doc),\n );\n}\n\nfunction hasDirectivesInSelectionSet(\n directives: GetDirectiveConfig[],\n selectionSet: SelectionSetNode | undefined,\n nestedCheck = true,\n): boolean {\n return (\n !!selectionSet &&\n selectionSet.selections &&\n selectionSet.selections.some(selection =>\n hasDirectivesInSelection(directives, selection, nestedCheck),\n )\n );\n}\n\nfunction hasDirectivesInSelection(\n directives: GetDirectiveConfig[],\n selection: SelectionNode,\n nestedCheck = true,\n): boolean {\n if (!isField(selection)) {\n return true;\n }\n\n if (!selection.directives) {\n return false;\n }\n\n return (\n selection.directives.some(getDirectiveMatcher(directives)) ||\n (nestedCheck &&\n hasDirectivesInSelectionSet(\n directives,\n selection.selectionSet,\n nestedCheck,\n ))\n );\n}\n\nfunction getArgumentMatcher(config: RemoveArgumentsConfig[]) {\n return function argumentMatcher(argument: ArgumentNode) {\n return config.some(\n (aConfig: RemoveArgumentsConfig) =>\n argument.value &&\n argument.value.kind === 'Variable' &&\n argument.value.name &&\n (aConfig.name === argument.value.name.value ||\n (aConfig.test && aConfig.test(argument))),\n );\n };\n}\n\nexport function removeArgumentsFromDocument(\n config: RemoveArgumentsConfig[],\n doc: DocumentNode,\n): DocumentNode | null {\n const argMatcher = getArgumentMatcher(config);\n\n return nullIfDocIsEmpty(\n visit(doc, {\n OperationDefinition: {\n enter(node) {\n return {\n ...node,\n // Remove matching top level variables definitions.\n variableDefinitions: node.variableDefinitions ? 
node.variableDefinitions.filter(\n varDef =>\n !config.some(arg => arg.name === varDef.variable.name.value),\n ) : [],\n };\n },\n },\n\n Field: {\n enter(node) {\n // If `remove` is set to true for an argument, and an argument match\n // is found for a field, remove the field as well.\n const shouldRemoveField = config.some(argConfig => argConfig.remove);\n\n if (shouldRemoveField) {\n let argMatchCount = 0;\n if (node.arguments) {\n node.arguments.forEach(arg => {\n if (argMatcher(arg)) {\n argMatchCount += 1;\n }\n });\n }\n\n if (argMatchCount === 1) {\n return null;\n }\n }\n },\n },\n\n Argument: {\n enter(node) {\n // Remove all matching arguments.\n if (argMatcher(node)) {\n return null;\n }\n },\n },\n }),\n );\n}\n\nexport function removeFragmentSpreadFromDocument(\n config: RemoveFragmentSpreadConfig[],\n doc: DocumentNode,\n): DocumentNode | null {\n function enter(\n node: FragmentSpreadNode | FragmentDefinitionNode,\n ): null | void {\n if (config.some(def => def.name === node.name.value)) {\n return null;\n }\n }\n\n return nullIfDocIsEmpty(\n visit(doc, {\n FragmentSpread: { enter },\n FragmentDefinition: { enter },\n }),\n );\n}\n\nfunction getAllFragmentSpreadsFromSelectionSet(\n selectionSet: SelectionSetNode,\n): FragmentSpreadNode[] {\n const allFragments: FragmentSpreadNode[] = [];\n\n selectionSet.selections.forEach(selection => {\n if (\n (isField(selection) || isInlineFragment(selection)) &&\n selection.selectionSet\n ) {\n getAllFragmentSpreadsFromSelectionSet(selection.selectionSet).forEach(\n frag => allFragments.push(frag),\n );\n } else if (selection.kind === 'FragmentSpread') {\n allFragments.push(selection);\n }\n });\n\n return allFragments;\n}\n\n// If the incoming document is a query, return it as is. Otherwise, build a\n// new document containing a query operation based on the selection set\n// of the previous main operation.\nexport function buildQueryFromSelectionSet(\n document: DocumentNode,\n): DocumentNode {\n const definition = getMainDefinition(document);\n const definitionOperation = (definition).operation;\n\n if (definitionOperation === 'query') {\n // Already a query, so return the existing document.\n return document;\n }\n\n // Build a new query using the selection set of the main operation.\n const modifiedDoc = visit(document, {\n OperationDefinition: {\n enter(node) {\n return {\n ...node,\n operation: 'query',\n };\n },\n },\n });\n return modifiedDoc;\n}\n\n// Remove fields / selection sets that include an @client directive.\nexport function removeClientSetsFromDocument(\n document: DocumentNode,\n): DocumentNode | null {\n checkDocument(document);\n\n let modifiedDoc = removeDirectivesFromDocument(\n [\n {\n test: (directive: DirectiveNode) => directive.name.value === 'client',\n remove: true,\n },\n ],\n document,\n );\n\n // After a fragment definition has had its @client related document\n // sets removed, if the only field it has left is a __typename field,\n // remove the entire fragment operation to prevent it from being fired\n // on the server.\n if (modifiedDoc) {\n modifiedDoc = visit(modifiedDoc, {\n FragmentDefinition: {\n enter(node) {\n if (node.selectionSet) {\n const isTypenameOnly = node.selectionSet.selections.every(\n selection =>\n isField(selection) && selection.name.value === '__typename',\n );\n if (isTypenameOnly) {\n return null;\n }\n }\n },\n },\n });\n }\n\n return modifiedDoc;\n}\n","import { dep, OptimisticDependencyFunction } from \"optimism\";\nimport { Slot } from \"@wry/context\";\nimport { 
InMemoryCache } from \"./inMemoryCache\";\nimport { ApolloCache } from '../../core';\n\nexport interface ReactiveVar {\n (newValue?: T): T;\n onNextChange(listener: ReactiveListener): () => void;\n attachCache(cache: ApolloCache): this;\n forgetCache(cache: ApolloCache): boolean;\n}\n\nexport type ReactiveListener = (value: T) => any;\n\n// Contextual Slot that acquires its value when custom read functions are\n// called in Policies#readField.\nexport const cacheSlot = new Slot>();\n\nconst cacheInfoMap = new WeakMap, {\n vars: Set>;\n dep: OptimisticDependencyFunction>;\n}>();\n\nfunction getCacheInfo(cache: ApolloCache) {\n let info = cacheInfoMap.get(cache)!;\n if (!info) {\n cacheInfoMap.set(cache, info = {\n vars: new Set,\n dep: dep(),\n });\n }\n return info;\n}\n\nexport function forgetCache(cache: ApolloCache) {\n getCacheInfo(cache).vars.forEach(rv => rv.forgetCache(cache));\n}\n\n// Calling forgetCache(cache) serves to silence broadcasts and allows the\n// cache to be garbage collected. However, the varsByCache WeakMap\n// preserves the set of reactive variables that were previously associated\n// with this cache, which makes it possible to \"recall\" the cache at a\n// later time, by reattaching it to those variables. If the cache has been\n// garbage collected in the meantime, because it is no longer reachable,\n// you won't be able to call recallCache(cache), and the cache will\n// automatically disappear from the varsByCache WeakMap.\nexport function recallCache(cache: ApolloCache) {\n getCacheInfo(cache).vars.forEach(rv => rv.attachCache(cache));\n}\n\nexport function makeVar(value: T): ReactiveVar {\n const caches = new Set>();\n const listeners = new Set>();\n\n const rv: ReactiveVar = function (newValue) {\n if (arguments.length > 0) {\n if (value !== newValue) {\n value = newValue!;\n caches.forEach(cache => {\n // Invalidate any fields with custom read functions that\n // consumed this variable, so query results involving those\n // fields will be recomputed the next time we read them.\n getCacheInfo(cache).dep.dirty(rv);\n // Broadcast changes to any caches that have previously read\n // from this variable.\n broadcast(cache);\n });\n // Finally, notify any listeners added via rv.onNextChange.\n const oldListeners = Array.from(listeners);\n listeners.clear();\n oldListeners.forEach(listener => listener(value));\n }\n } else {\n // When reading from the variable, obtain the current cache from\n // context via cacheSlot. This isn't entirely foolproof, but it's\n // the same system that powers varDep.\n const cache = cacheSlot.getValue();\n if (cache) {\n attach(cache);\n getCacheInfo(cache).dep(rv);\n }\n }\n\n return value;\n };\n\n rv.onNextChange = listener => {\n listeners.add(listener);\n return () => {\n listeners.delete(listener);\n };\n };\n\n const attach = rv.attachCache = cache => {\n caches.add(cache);\n getCacheInfo(cache).vars.add(rv);\n return rv;\n };\n\n rv.forgetCache = cache => caches.delete(cache);\n\n return rv;\n}\n\ntype Broadcastable = ApolloCache & {\n // This method is protected in InMemoryCache, which we are ignoring, but\n // we still want some semblance of type safety when we call it.\n broadcastWatches?: InMemoryCache[\"broadcastWatches\"];\n};\n\nfunction broadcast(cache: Broadcastable) {\n if (cache.broadcastWatches) {\n cache.broadcastWatches();\n }\n}\n","/**\n * The current status of a query’s execution in our system.\n */\nexport enum NetworkStatus {\n /**\n * The query has never been run before and the query is now currently running. 
A query will still\n * have this network status even if a partial data result was returned from the cache, but a\n * query was dispatched anyway.\n */\n loading = 1,\n\n /**\n * If `setVariables` was called and a query was fired because of that then the network status\n * will be `setVariables` until the result of that query comes back.\n */\n setVariables = 2,\n\n /**\n * Indicates that `fetchMore` was called on this query and that the query created is currently in\n * flight.\n */\n fetchMore = 3,\n\n /**\n * Similar to the `setVariables` network status. It means that `refetch` was called on a query\n * and the refetch request is currently in flight.\n */\n refetch = 4,\n\n /**\n * Indicates that a polling query is currently in flight. So for example if you are polling a\n * query every 10 seconds then the network status will switch to `poll` every 10 seconds whenever\n * a poll request has been sent but not resolved.\n */\n poll = 6,\n\n /**\n * No request is in flight for this query, and no errors happened. Everything is OK.\n */\n ready = 7,\n\n /**\n * No request is in flight for this query, but one or more errors were detected.\n */\n error = 8,\n}\n\n/**\n * Returns true if there is currently a network request in flight according to a given network\n * status.\n */\nexport function isNetworkRequestInFlight(\n networkStatus?: NetworkStatus,\n): boolean {\n return networkStatus ? networkStatus < 7 : false;\n}\n","export function isNonEmptyArray(value?: ArrayLike): value is Array {\n return Array.isArray(value) && value.length > 0;\n}\n","import { maybe } from \"./maybe\";\n\ndeclare global {\n // Despite our attempts to reuse the React Native __DEV__ constant instead of\n // inventing something new and Apollo-specific, declaring a useful type for\n // __DEV__ unfortunately conflicts (TS2451) with the global declaration in\n // @types/react-native/index.d.ts.\n //\n // To hide that harmless conflict, we @ts-ignore this line, which should\n // continue to provide a type for __DEV__ elsewhere in the Apollo Client\n // codebase, even when @types/react-native is not in use.\n //\n // However, because TypeScript drops @ts-ignore comments when generating .d.ts\n // files (https://github.com/microsoft/TypeScript/issues/38628), we also\n // sanitize the dist/utilities/globals/global.d.ts file to avoid declaring\n // __DEV__ globally altogether when @apollo/client is installed in the\n // node_modules directory of an application.\n //\n // @ts-ignore\n const __DEV__: boolean | undefined;\n}\n\nexport default (\n maybe(() => globalThis) ||\n maybe(() => window) ||\n maybe(() => self) ||\n maybe(() => global) ||\n // We don't expect the Function constructor ever to be invoked at runtime, as\n // long as at least one of globalThis, window, self, or global is defined, so\n // we are under no obligation to make it easy for static analysis tools to\n // detect syntactic usage of the Function constructor. If you think you can\n // improve your static analysis to detect this obfuscation, think again. 
This\n // is an arms race you cannot win, at least not in JavaScript.\n maybe(function() { return maybe.constructor(\"return this\")() })\n) as typeof globalThis & {\n __DEV__: typeof __DEV__;\n};\n","import { invariant, InvariantError } from '../globals';\n\nimport {\n DocumentNode,\n FragmentDefinitionNode,\n InlineFragmentNode,\n SelectionNode,\n} from 'graphql';\n\n// TODO(brian): A hack until this issue is resolved (https://github.com/graphql/graphql-js/issues/3356)\ntype Kind = any;\ntype OperationTypeNode = any;\n/**\n * Returns a query document which adds a single query operation that only\n * spreads the target fragment inside of it.\n *\n * So for example a document of:\n *\n * ```graphql\n * fragment foo on Foo { a b c }\n * ```\n *\n * Turns into:\n *\n * ```graphql\n * { ...foo }\n *\n * fragment foo on Foo { a b c }\n * ```\n *\n * The target fragment will either be the only fragment in the document, or a\n * fragment specified by the provided `fragmentName`. If there is more than one\n * fragment, but a `fragmentName` was not defined then an error will be thrown.\n */\nexport function getFragmentQueryDocument(\n document: DocumentNode,\n fragmentName?: string,\n): DocumentNode {\n let actualFragmentName = fragmentName;\n\n // Build an array of all our fragment definitions that will be used for\n // validations. We also do some validations on the other definitions in the\n // document while building this list.\n const fragments: Array = [];\n document.definitions.forEach(definition => {\n // Throw an error if we encounter an operation definition because we will\n // define our own operation definition later on.\n if (definition.kind === 'OperationDefinition') {\n throw new InvariantError(\n `Found a ${definition.operation} operation${\n definition.name ? ` named '${definition.name.value}'` : ''\n }. ` +\n 'No operations are allowed when using a fragment as a query. Only fragments are allowed.',\n );\n }\n // Add our definition to the fragments array if it is a fragment\n // definition.\n if (definition.kind === 'FragmentDefinition') {\n fragments.push(definition);\n }\n });\n\n // If the user did not give us a fragment name then let us try to get a\n // name from a single fragment in the definition.\n if (typeof actualFragmentName === 'undefined') {\n invariant(\n fragments.length === 1,\n `Found ${\n fragments.length\n } fragments. 
\\`fragmentName\\` must be provided when there is not exactly 1 fragment.`,\n );\n actualFragmentName = fragments[0].name.value;\n }\n\n // Generate a query document with an operation that simply spreads the\n // fragment inside of it.\n const query: DocumentNode = {\n ...document,\n definitions: [\n {\n kind: 'OperationDefinition' as Kind,\n // OperationTypeNode is an enum\n operation: 'query' as OperationTypeNode,\n selectionSet: {\n kind: 'SelectionSet' as Kind,\n selections: [\n {\n kind: 'FragmentSpread' as Kind,\n name: {\n kind: 'Name' as Kind,\n value: actualFragmentName,\n },\n },\n ],\n },\n },\n ...document.definitions,\n ],\n };\n\n return query;\n}\n\n/**\n * This is an interface that describes a map from fragment names to fragment definitions.\n */\nexport interface FragmentMap {\n [fragmentName: string]: FragmentDefinitionNode;\n}\n\nexport type FragmentMapFunction =\n (fragmentName: string) => FragmentDefinitionNode | null;\n\n// Utility function that takes a list of fragment definitions and makes a hash out of them\n// that maps the name of the fragment to the fragment definition.\nexport function createFragmentMap(\n fragments: FragmentDefinitionNode[] = [],\n): FragmentMap {\n const symTable: FragmentMap = {};\n fragments.forEach(fragment => {\n symTable[fragment.name.value] = fragment;\n });\n return symTable;\n}\n\nexport function getFragmentFromSelection(\n selection: SelectionNode,\n fragmentMap?: FragmentMap | FragmentMapFunction,\n): InlineFragmentNode | FragmentDefinitionNode | null {\n switch (selection.kind) {\n case 'InlineFragment':\n return selection;\n case 'FragmentSpread': {\n const fragmentName = selection.name.value;\n if (typeof fragmentMap === \"function\") {\n return fragmentMap(fragmentName);\n }\n const fragment = fragmentMap && fragmentMap[fragmentName];\n invariant(fragment, `No fragment named ${fragmentName}`);\n return fragment || null;\n }\n default:\n return null;\n }\n}\n","import { TupleToIntersection } from './mergeDeep';\n\n/**\n * Merges the provided objects shallowly and removes\n * all properties with an `undefined` value\n */\nexport function compact(\n ...objects: TArgs\n): TupleToIntersection {\n const result = Object.create(null);\n\n objects.forEach(obj => {\n if (!obj) return;\n Object.keys(obj).forEach(key => {\n const value = (obj as any)[key];\n if (value !== void 0) {\n result[key] = value;\n }\n });\n });\n\n return result;\n}\n","import * as React from 'react';\nimport { ApolloClient } from '../../core';\nimport { canUseSymbol } from '../../utilities';\nimport type { RenderPromises } from '../ssr';\n\nexport interface ApolloContextValue {\n client?: ApolloClient;\n renderPromises?: RenderPromises;\n}\n\n// To make sure Apollo Client doesn't create more than one React context\n// (which can lead to problems like having an Apollo Client instance added\n// in one context, then attempting to retrieve it from another different\n// context), a single Apollo context is created and tracked in global state.\nconst contextKey = canUseSymbol\n ? 
Symbol.for('__APOLLO_CONTEXT__')\n : '__APOLLO_CONTEXT__';\n\nexport function getApolloContext(): React.Context {\n let context = (React.createContext as any)[contextKey] as React.Context;\n if (!context) {\n Object.defineProperty(React.createContext, contextKey, {\n value: context = React.createContext({}),\n enumerable: false,\n writable: false,\n configurable: true,\n });\n context.displayName = 'ApolloContext';\n }\n return context;\n}\n\nexport { getApolloContext as resetApolloContext }\n","export function isNonNullObject(obj: any): obj is Record {\n return obj !== null && typeof obj === 'object';\n}\n","import global from \"./global\";\nimport { maybe } from \"./maybe\";\n\n// To keep string-based find/replace minifiers from messing with __DEV__ inside\n// string literals or properties like global.__DEV__, we construct the \"__DEV__\"\n// string in a roundabout way that won't be altered by find/replace strategies.\nconst __ = \"__\";\nconst GLOBAL_KEY = [__, __].join(\"DEV\");\n\nfunction getDEV() {\n try {\n return Boolean(__DEV__);\n } catch {\n Object.defineProperty(global, GLOBAL_KEY, {\n // In a buildless browser environment, maybe(() => process.env.NODE_ENV)\n // evaluates as undefined, so __DEV__ becomes true by default, but can be\n // initialized to false instead by a script/module that runs earlier.\n value: maybe(() => process.env.NODE_ENV) !== \"production\",\n enumerable: false,\n configurable: true,\n writable: true,\n });\n // Using computed property access rather than global.__DEV__ here prevents\n // string-based find/replace strategies from munging this to global.false:\n return (global as any)[GLOBAL_KEY];\n }\n}\n\nexport default getDEV();\n","// The ordering of these imports is important, because it ensures the temporary\n// process.env.NODE_ENV polyfill is defined globally (if necessary) before we\n// import { Source } from 'graphql'. The instanceOf function that we really care\n// about (the one that uses process.env.NODE_ENV) is not exported from the\n// top-level graphql package, but graphql/language/source uses instanceOf, and\n// has relatively few dependencies, so importing it here should not increase\n// bundle sizes as much as other options.\nimport { remove } from 'ts-invariant/process';\nimport { Source } from 'graphql';\n\nexport function removeTemporaryGlobals() {\n // Using Source here here just to make sure it won't be tree-shaken away.\n return typeof Source === \"function\" ? 
remove() : remove();\n}\n","import { invariant, InvariantError } from \"ts-invariant\";\n\n// Just in case the graphql package switches from process.env.NODE_ENV to\n// __DEV__, make sure __DEV__ is polyfilled before importing graphql.\nimport DEV from \"./DEV\";\nexport { DEV }\nexport function checkDEV() {\n invariant(\"boolean\" === typeof DEV, DEV);\n}\n\n// Import graphql/jsutils/instanceOf safely, working around its unchecked usage\n// of process.env.NODE_ENV and https://github.com/graphql/graphql-js/pull/2894.\nimport { removeTemporaryGlobals } from \"./fix-graphql\";\n\n// Synchronously undo the global process.env.NODE_ENV polyfill that we created\n// temporarily while importing the offending graphql/jsutils/instanceOf module.\nremoveTemporaryGlobals();\n\nexport { maybe } from \"./maybe\";\nexport { default as global } from \"./global\";\nexport { invariant, InvariantError }\n\n// Ensure __DEV__ was properly initialized, and prevent tree-shaking bundlers\n// from mistakenly pruning the ./DEV module (see issue #8674).\ncheckDEV();\n","import '../utilities/globals';\n\nimport { GraphQLError } from 'graphql';\n\nimport { isNonEmptyArray } from '../utilities';\nimport { ServerParseError } from '../link/http';\nimport { ServerError } from '../link/utils';\n\nexport function isApolloError(err: Error): err is ApolloError {\n return err.hasOwnProperty('graphQLErrors');\n}\n\n// Sets the error message on this error according to the\n// the GraphQL and network errors that are present.\n// If the error message has already been set through the\n// constructor or otherwise, this function is a nop.\nconst generateErrorMessage = (err: ApolloError) => {\n let message = '';\n // If we have GraphQL errors present, add that to the error message.\n if (isNonEmptyArray(err.graphQLErrors) || isNonEmptyArray(err.clientErrors)) {\n const errors = ((err.graphQLErrors || []) as readonly Error[])\n .concat(err.clientErrors || []);\n errors.forEach((error: Error) => {\n const errorMessage = error\n ? error.message\n : 'Error message not found.';\n message += `${errorMessage}\\n`;\n });\n }\n\n if (err.networkError) {\n message += `${err.networkError.message}\\n`;\n }\n\n // strip newline from the end of the message\n message = message.replace(/\\n$/, '');\n return message;\n};\n\nexport type GraphQLErrors = ReadonlyArray;\n\nexport type NetworkError = Error | ServerParseError | ServerError | null;\n\nexport class ApolloError extends Error {\n public message: string;\n public graphQLErrors: GraphQLErrors;\n public clientErrors: ReadonlyArray;\n public networkError: Error | ServerParseError | ServerError | null;\n\n // An object that can be used to provide some additional information\n // about an error, e.g. specifying the type of error this is. Used\n // internally within Apollo Client.\n public extraInfo: any;\n\n // Constructs an instance of ApolloError given a GraphQLError\n // or a network error. 
Note that one of these has to be a valid\n // value or the constructed error will be meaningless.\n constructor({\n graphQLErrors,\n clientErrors,\n networkError,\n errorMessage,\n extraInfo,\n }: {\n graphQLErrors?: ReadonlyArray;\n clientErrors?: ReadonlyArray;\n networkError?: Error | ServerParseError | ServerError | null;\n errorMessage?: string;\n extraInfo?: any;\n }) {\n super(errorMessage);\n this.graphQLErrors = graphQLErrors || [];\n this.clientErrors = clientErrors || [];\n this.networkError = networkError || null;\n this.message = errorMessage || generateErrorMessage(this);\n this.extraInfo = extraInfo;\n\n // We're not using `Object.setPrototypeOf` here as it isn't fully\n // supported on Android (see issue #3236).\n (this as any).__proto__ = ApolloError.prototype;\n }\n}\n","import { InvariantError } from '../globals';\n\nimport {\n DirectiveNode,\n FieldNode,\n IntValueNode,\n FloatValueNode,\n StringValueNode,\n BooleanValueNode,\n ObjectValueNode,\n ListValueNode,\n EnumValueNode,\n NullValueNode,\n VariableNode,\n InlineFragmentNode,\n ValueNode,\n SelectionNode,\n NameNode,\n SelectionSetNode,\n DocumentNode,\n} from 'graphql';\n\nimport { isNonNullObject } from '../common/objects';\nimport { FragmentMap, getFragmentFromSelection } from './fragments';\n\nexport interface Reference {\n readonly __ref: string;\n}\n\nexport function makeReference(id: string): Reference {\n return { __ref: String(id) };\n}\n\nexport function isReference(obj: any): obj is Reference {\n return Boolean(obj && typeof obj === 'object' && typeof obj.__ref === 'string');\n}\n\nexport type StoreValue =\n | number\n | string\n | string[]\n | Reference\n | Reference[]\n | null\n | undefined\n | void\n | Object;\n\nexport interface StoreObject {\n __typename?: string;\n [storeFieldName: string]: StoreValue;\n}\n\nexport function isDocumentNode(value: any): value is DocumentNode {\n return (\n isNonNullObject(value) &&\n (value as DocumentNode).kind === \"Document\" &&\n Array.isArray((value as DocumentNode).definitions)\n );\n}\n\nfunction isStringValue(value: ValueNode): value is StringValueNode {\n return value.kind === 'StringValue';\n}\n\nfunction isBooleanValue(value: ValueNode): value is BooleanValueNode {\n return value.kind === 'BooleanValue';\n}\n\nfunction isIntValue(value: ValueNode): value is IntValueNode {\n return value.kind === 'IntValue';\n}\n\nfunction isFloatValue(value: ValueNode): value is FloatValueNode {\n return value.kind === 'FloatValue';\n}\n\nfunction isVariable(value: ValueNode): value is VariableNode {\n return value.kind === 'Variable';\n}\n\nfunction isObjectValue(value: ValueNode): value is ObjectValueNode {\n return value.kind === 'ObjectValue';\n}\n\nfunction isListValue(value: ValueNode): value is ListValueNode {\n return value.kind === 'ListValue';\n}\n\nfunction isEnumValue(value: ValueNode): value is EnumValueNode {\n return value.kind === 'EnumValue';\n}\n\nfunction isNullValue(value: ValueNode): value is NullValueNode {\n return value.kind === 'NullValue';\n}\n\nexport function valueToObjectRepresentation(\n argObj: any,\n name: NameNode,\n value: ValueNode,\n variables?: Object,\n) {\n if (isIntValue(value) || isFloatValue(value)) {\n argObj[name.value] = Number(value.value);\n } else if (isBooleanValue(value) || isStringValue(value)) {\n argObj[name.value] = value.value;\n } else if (isObjectValue(value)) {\n const nestedArgObj = {};\n value.fields.map(obj =>\n valueToObjectRepresentation(nestedArgObj, obj.name, obj.value, variables),\n );\n argObj[name.value] 
= nestedArgObj;\n } else if (isVariable(value)) {\n const variableValue = (variables || ({} as any))[value.name.value];\n argObj[name.value] = variableValue;\n } else if (isListValue(value)) {\n argObj[name.value] = value.values.map(listValue => {\n const nestedArgArrayObj = {};\n valueToObjectRepresentation(\n nestedArgArrayObj,\n name,\n listValue,\n variables,\n );\n return (nestedArgArrayObj as any)[name.value];\n });\n } else if (isEnumValue(value)) {\n argObj[name.value] = (value as EnumValueNode).value;\n } else if (isNullValue(value)) {\n argObj[name.value] = null;\n } else {\n throw new InvariantError(\n `The inline argument \"${name.value}\" of kind \"${(value as any).kind}\"` +\n 'is not supported. Use variables instead of inline arguments to ' +\n 'overcome this limitation.',\n );\n }\n}\n\nexport function storeKeyNameFromField(\n field: FieldNode,\n variables?: Object,\n): string {\n let directivesObj: any = null;\n if (field.directives) {\n directivesObj = {};\n field.directives.forEach(directive => {\n directivesObj[directive.name.value] = {};\n\n if (directive.arguments) {\n directive.arguments.forEach(({ name, value }) =>\n valueToObjectRepresentation(\n directivesObj[directive.name.value],\n name,\n value,\n variables,\n ),\n );\n }\n });\n }\n\n let argObj: any = null;\n if (field.arguments && field.arguments.length) {\n argObj = {};\n field.arguments.forEach(({ name, value }) =>\n valueToObjectRepresentation(argObj, name, value, variables),\n );\n }\n\n return getStoreKeyName(field.name.value, argObj, directivesObj);\n}\n\nexport type Directives = {\n [directiveName: string]: {\n [argName: string]: any;\n };\n};\n\nconst KNOWN_DIRECTIVES: string[] = [\n 'connection',\n 'include',\n 'skip',\n 'client',\n 'rest',\n 'export',\n];\n\nexport const getStoreKeyName = Object.assign(function (\n fieldName: string,\n args?: Record | null,\n directives?: Directives,\n): string {\n if (\n args &&\n directives &&\n directives['connection'] &&\n directives['connection']['key']\n ) {\n if (\n directives['connection']['filter'] &&\n (directives['connection']['filter'] as string[]).length > 0\n ) {\n const filterKeys = directives['connection']['filter']\n ? (directives['connection']['filter'] as string[])\n : [];\n filterKeys.sort();\n\n const filteredArgs = {} as { [key: string]: any };\n filterKeys.forEach(key => {\n filteredArgs[key] = args[key];\n });\n\n return `${directives['connection']['key']}(${stringify(\n filteredArgs,\n )})`;\n } else {\n return directives['connection']['key'];\n }\n }\n\n let completeFieldName: string = fieldName;\n\n if (args) {\n // We can't use `JSON.stringify` here since it's non-deterministic,\n // and can lead to different store key names being created even though\n // the `args` object used during creation has the same properties/values.\n const stringifiedArgs: string = stringify(args);\n completeFieldName += `(${stringifiedArgs})`;\n }\n\n if (directives) {\n Object.keys(directives).forEach(key => {\n if (KNOWN_DIRECTIVES.indexOf(key) !== -1) return;\n if (directives[key] && Object.keys(directives[key]).length) {\n completeFieldName += `@${key}(${stringify(directives[key])})`;\n } else {\n completeFieldName += `@${key}`;\n }\n });\n }\n\n return completeFieldName;\n}, {\n setStringify(s: typeof stringify) {\n const previous = stringify;\n stringify = s;\n return previous;\n },\n});\n\n// Default stable JSON.stringify implementation. 
Can be updated/replaced with\n// something better by calling getStoreKeyName.setStringify.\nlet stringify = function defaultStringify(value: any): string {\n return JSON.stringify(value, stringifyReplacer);\n};\n\nfunction stringifyReplacer(_key: string, value: any): any {\n if (isNonNullObject(value) && !Array.isArray(value)) {\n value = Object.keys(value).sort().reduce((copy, key) => {\n copy[key] = value[key];\n return copy;\n }, {} as Record);\n }\n return value;\n}\n\nexport function argumentsObjectFromField(\n field: FieldNode | DirectiveNode,\n variables?: Record,\n): Object | null {\n if (field.arguments && field.arguments.length) {\n const argObj: Object = {};\n field.arguments.forEach(({ name, value }) =>\n valueToObjectRepresentation(argObj, name, value, variables),\n );\n return argObj;\n }\n return null;\n}\n\nexport function resultKeyNameFromField(field: FieldNode): string {\n return field.alias ? field.alias.value : field.name.value;\n}\n\nexport function getTypenameFromResult(\n result: Record,\n selectionSet: SelectionSetNode,\n fragmentMap?: FragmentMap,\n): string | undefined {\n if (typeof result.__typename === 'string') {\n return result.__typename;\n }\n\n for (const selection of selectionSet.selections) {\n if (isField(selection)) {\n if (selection.name.value === '__typename') {\n return result[resultKeyNameFromField(selection)];\n }\n } else {\n const typename = getTypenameFromResult(\n result,\n getFragmentFromSelection(selection, fragmentMap)!.selectionSet,\n fragmentMap,\n );\n if (typeof typename === 'string') {\n return typename;\n }\n }\n }\n}\n\nexport function isField(selection: SelectionNode): selection is FieldNode {\n return selection.kind === 'Field';\n}\n\nexport function isInlineFragment(\n selection: SelectionNode,\n): selection is InlineFragmentNode {\n return selection.kind === 'InlineFragment';\n}\n\nexport type VariableValue = (node: VariableNode) => any;\n","import { invariant } from '../../utilities/globals';\n\nimport { print } from 'graphql';\nimport {\n DocumentNode,\n ExecutionResult,\n GraphQLError,\n} from 'graphql';\n\nimport { ApolloLink, Operation } from '../core';\nimport {\n Observable,\n Observer,\n ObservableSubscription,\n compact,\n isNonEmptyArray,\n} from '../../utilities';\nimport { NetworkError } from '../../errors';\nimport { ServerError } from '../utils';\n\nexport const VERSION = 1;\n\nexport interface ErrorResponse {\n graphQLErrors?: readonly GraphQLError[];\n networkError?: NetworkError;\n response?: ExecutionResult;\n operation: Operation;\n}\n\ntype SHA256Function = (...args: any[]) => string | PromiseLike;\ntype GenerateHashFunction = (document: DocumentNode) => string | PromiseLike;\n\nexport namespace PersistedQueryLink {\n interface BaseOptions {\n disable?: (error: ErrorResponse) => boolean;\n useGETForHashedQueries?: boolean;\n };\n\n interface SHA256Options extends BaseOptions {\n sha256: SHA256Function;\n generateHash?: never;\n };\n\n interface GenerateHashOptions extends BaseOptions {\n sha256?: never;\n generateHash: GenerateHashFunction;\n };\n\n export type Options = SHA256Options | GenerateHashOptions;\n}\n\nfunction collectErrorsByMessage(\n graphQLErrors: TError[] | readonly TError[] | undefined,\n): Record {\n const collected: Record = Object.create(null);\n if (isNonEmptyArray(graphQLErrors)) {\n graphQLErrors.forEach(error => collected[error.message] = error);\n }\n return collected;\n}\n\nconst defaultOptions = {\n disable: ({ graphQLErrors, operation }: ErrorResponse) => {\n const errorMessages = 
collectErrorsByMessage(graphQLErrors);\n\n // if the server doesn't support persisted queries, don't try anymore\n if (errorMessages.PersistedQueryNotSupported) {\n return true;\n }\n\n if (errorMessages.PersistedQueryNotFound) {\n return false;\n }\n\n const { response } = operation.getContext();\n // if the server responds with bad request\n // Apollo Server responds with 400 for GET and 500 for POST when no query is found\n if (\n response &&\n response.status &&\n (response.status === 400 || response.status === 500)\n ) {\n return true;\n }\n\n return false;\n },\n useGETForHashedQueries: false,\n};\n\nfunction operationDefinesMutation(operation: Operation) {\n return operation.query.definitions.some(\n d => d.kind === 'OperationDefinition' && d.operation === 'mutation');\n}\n\nconst { hasOwnProperty } = Object.prototype;\n\nconst hashesByQuery = new WeakMap<\n DocumentNode,\n Record>\n>();\n\nlet nextHashesChildKey = 0;\n\nexport const createPersistedQueryLink = (\n options: PersistedQueryLink.Options,\n) => {\n // Ensure a SHA-256 hash function is provided, if a custom hash\n // generation function is not provided. We don't supply a SHA-256 hash\n // function by default, to avoid forcing one as a dependency. Developers\n // should pick the most appropriate SHA-256 function (sync or async) for\n // their needs/environment, or provide a fully custom hash generation\n // function (via the `generateHash` option) if they want to handle\n // hashing with something other than SHA-256.\n invariant(\n options && (\n typeof options.sha256 === 'function' ||\n typeof options.generateHash === 'function'\n ),\n 'Missing/invalid \"sha256\" or \"generateHash\" function. Please ' +\n 'configure one using the \"createPersistedQueryLink(options)\" options ' +\n 'parameter.'\n );\n\n const {\n sha256,\n // If both a `sha256` and `generateHash` option are provided, the\n // `sha256` option will be ignored. Developers can configure and\n // use any hashing approach they want in a custom `generateHash`\n // function; they aren't limited to SHA-256.\n generateHash = (query: DocumentNode) =>\n Promise.resolve(sha256!(print(query))),\n disable,\n useGETForHashedQueries\n } = compact(defaultOptions, options);\n\n let supportsPersistedQueries = true;\n\n const hashesChildKey = 'forLink' + nextHashesChildKey++;\n\n const getHashPromise = (query: DocumentNode) =>\n new Promise(resolve => resolve(generateHash(query)));\n\n function getQueryHash(query: DocumentNode): Promise {\n if (!query || typeof query !== 'object') {\n // If the query is not an object, we won't be able to store its hash as\n // a property of query[hashesKey], so we let generateHash(query) decide\n // what to do with the bogus query.\n return getHashPromise(query);\n }\n let hashes = hashesByQuery.get(query)!;\n if (!hashes) hashesByQuery.set(query, hashes = Object.create(null));\n return hasOwnProperty.call(hashes, hashesChildKey)\n ? 
hashes[hashesChildKey]\n : hashes[hashesChildKey] = getHashPromise(query);\n }\n\n return new ApolloLink((operation, forward) => {\n invariant(\n forward,\n 'PersistedQueryLink cannot be the last link in the chain.'\n );\n\n const { query } = operation;\n\n return new Observable((observer: Observer) => {\n let subscription: ObservableSubscription;\n let retried = false;\n let originalFetchOptions: any;\n let setFetchOptions = false;\n const retry = (\n {\n response,\n networkError,\n }: { response?: ExecutionResult; networkError?: ServerError },\n cb: () => void,\n ) => {\n if (!retried && ((response && response.errors) || networkError)) {\n retried = true;\n\n const graphQLErrors: GraphQLError[] = [];\n\n const responseErrors = response && response.errors;\n if (isNonEmptyArray(responseErrors)) {\n graphQLErrors.push(...responseErrors);\n }\n\n // Network errors can return GraphQL errors on for example a 403\n const networkErrors =\n networkError &&\n networkError.result &&\n networkError.result.errors as GraphQLError[];\n if (isNonEmptyArray(networkErrors)) {\n graphQLErrors.push(...networkErrors);\n }\n\n const disablePayload = {\n response,\n networkError,\n operation,\n graphQLErrors: isNonEmptyArray(graphQLErrors) ? graphQLErrors : void 0,\n };\n\n // if the server doesn't support persisted queries, don't try anymore\n supportsPersistedQueries = !disable(disablePayload);\n\n // if its not found, we can try it again, otherwise just report the error\n if (\n collectErrorsByMessage(graphQLErrors).PersistedQueryNotFound ||\n !supportsPersistedQueries\n ) {\n // need to recall the link chain\n if (subscription) subscription.unsubscribe();\n // actually send the query this time\n operation.setContext({\n http: {\n includeQuery: true,\n includeExtensions: supportsPersistedQueries,\n },\n fetchOptions: {\n // Since we're including the full query, which may be\n // large, we should send it in the body of a POST request.\n // See issue #7456.\n method: 'POST',\n },\n });\n if (setFetchOptions) {\n operation.setContext({ fetchOptions: originalFetchOptions });\n }\n subscription = forward(operation).subscribe(handler);\n\n return;\n }\n }\n cb();\n };\n const handler = {\n next: (response: ExecutionResult) => {\n retry({ response }, () => observer.next!(response));\n },\n error: (networkError: ServerError) => {\n retry({ networkError }, () => observer.error!(networkError));\n },\n complete: observer.complete!.bind(observer),\n };\n\n // don't send the query the first time\n operation.setContext({\n http: {\n includeQuery: !supportsPersistedQueries,\n includeExtensions: supportsPersistedQueries,\n },\n });\n\n // If requested, set method to GET if there are no mutations. 
Remember the\n // original fetchOptions so we can restore them if we fall back to a\n // non-hashed request.\n if (\n useGETForHashedQueries &&\n supportsPersistedQueries &&\n !operationDefinesMutation(operation)\n ) {\n operation.setContext(\n ({ fetchOptions = {} }: { fetchOptions: Record }) => {\n originalFetchOptions = fetchOptions;\n return {\n fetchOptions: {\n ...fetchOptions,\n method: 'GET',\n },\n };\n },\n );\n setFetchOptions = true;\n }\n\n if (supportsPersistedQueries) {\n getQueryHash(query).then((sha256Hash) => {\n operation.extensions.persistedQuery = {\n version: VERSION,\n sha256Hash,\n };\n subscription = forward(operation).subscribe(handler);\n }).catch(observer.error!.bind(observer));;\n } else {\n subscription = forward(operation).subscribe(handler);\n }\n\n return () => {\n if (subscription) subscription.unsubscribe();\n };\n });\n });\n};\n","const prefixCounts = new Map();\n\n// These IDs won't be globally unique, but they will be unique within this\n// process, thanks to the counter, and unguessable thanks to the random suffix.\nexport function makeUniqueId(prefix: string) {\n const count = prefixCounts.get(prefix) || 1;\n prefixCounts.set(prefix, count + 1);\n return `${prefix}:${count}:${Math.random().toString(36).slice(2)}`;\n}\n","import { InvariantError } from '../../utilities/globals';\n\nexport type ClientParseError = InvariantError & {\n parseError: Error;\n};\n\nexport const serializeFetchParameter = (p: any, label: string) => {\n let serialized;\n try {\n serialized = JSON.stringify(p);\n } catch (e) {\n const parseError = new InvariantError(\n `Network request failed. ${label} is not serializable: ${e.message}`,\n ) as ClientParseError;\n parseError.parseError = e;\n throw parseError;\n }\n return serialized;\n};\n","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/nodeStream.ts\n */\n\nimport { Readable as NodeReadableStream } from \"stream\";\nimport { canUseAsyncIteratorSymbol } from \"../../../utilities\";\n\ninterface NodeStreamIterator {\n next(): Promise>;\n [Symbol.asyncIterator]?(): AsyncIterator;\n}\n\nexport default function nodeStreamIterator(\n stream: NodeReadableStream\n): AsyncIterableIterator {\n let cleanup: (() => void) | null = null;\n let error: Error | null = null;\n let done = false;\n const data: unknown[] = [];\n\n const waiting: [\n (\n value:\n | IteratorResult\n | PromiseLike>\n ) => void,\n (reason?: any) => void\n ][] = [];\n\n function onData(chunk: any) {\n if (error) return;\n if (waiting.length) {\n const shiftedArr = waiting.shift();\n if (Array.isArray(shiftedArr) && shiftedArr[0]) {\n return shiftedArr[0]({ value: chunk, done: false });\n }\n }\n data.push(chunk);\n }\n function onError(err: Error) {\n error = err;\n const all = waiting.slice();\n all.forEach(function (pair) {\n pair[1](err);\n });\n !cleanup || cleanup();\n }\n function onEnd() {\n done = true;\n const all = waiting.slice();\n all.forEach(function (pair) {\n pair[0]({ value: undefined, done: true });\n });\n !cleanup || cleanup();\n }\n\n cleanup = function () {\n cleanup = null;\n stream.removeListener(\"data\", onData);\n stream.removeListener(\"error\", onError);\n stream.removeListener(\"end\", onEnd);\n stream.removeListener(\"finish\", onEnd);\n stream.removeListener(\"close\", onEnd);\n };\n stream.on(\"data\", onData);\n stream.on(\"error\", onError);\n stream.on(\"end\", onEnd);\n stream.on(\"finish\", onEnd);\n stream.on(\"close\", onEnd);\n\n function getNext(): Promise> {\n return new 
Promise(function (resolve, reject) {\n if (error) return reject(error);\n if (data.length) return resolve({ value: data.shift() as T, done: false });\n if (done) return resolve({ value: undefined, done: true });\n waiting.push([resolve, reject]);\n });\n }\n\n const iterator: NodeStreamIterator = {\n next(): Promise> {\n return getNext();\n },\n };\n\n if (canUseAsyncIteratorSymbol) {\n iterator[Symbol.asyncIterator] = function (): AsyncIterator {\n return this;\n };\n }\n\n return iterator as AsyncIterableIterator;\n}\n","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/reader.ts\n */\n\nimport { canUseAsyncIteratorSymbol } from \"../../../utilities\";\n\ninterface ReaderIterator {\n next(): Promise>;\n [Symbol.asyncIterator]?(): AsyncIterator;\n}\n\nexport default function readerIterator(\n reader: ReadableStreamDefaultReader\n): AsyncIterableIterator {\n const iterator: ReaderIterator = {\n next() {\n return reader.read();\n },\n };\n\n if (canUseAsyncIteratorSymbol) {\n iterator[Symbol.asyncIterator] = function (): AsyncIterator {\n return this;\n };\n }\n\n return iterator as AsyncIterableIterator;\n}\n","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/index.ts\n */\n\nimport { Response as NodeResponse } from \"node-fetch\";\nimport {\n isAsyncIterableIterator,\n isBlob,\n isNodeResponse,\n isNodeReadableStream,\n isReadableStream,\n isStreamableBlob,\n} from \"../../utilities/common/responseIterator\";\n\nimport asyncIterator from \"./iterators/async\";\nimport nodeStreamIterator from \"./iterators/nodeStream\";\nimport promiseIterator from \"./iterators/promise\";\nimport readerIterator from \"./iterators/reader\";\n\nexport function responseIterator(\n response: Response | NodeResponse\n): AsyncIterableIterator {\n let body: unknown = response;\n\n if (isNodeResponse(response)) body = response.body;\n\n if (isAsyncIterableIterator(body)) return asyncIterator(body);\n\n if (isReadableStream(body)) return readerIterator(body.getReader());\n\n // this errors without casting to ReadableStream\n // because Blob.stream() returns a NodeJS ReadableStream\n if (isStreamableBlob(body)) {\n return readerIterator(\n (body.stream() as unknown as ReadableStream).getReader()\n );\n }\n\n if (isBlob(body)) return promiseIterator(body.arrayBuffer());\n\n if (isNodeReadableStream(body)) return nodeStreamIterator(body);\n\n throw new Error(\n \"Unknown body type for responseIterator. 
Please pass a streamable response.\"\n );\n}\n","import { Response as NodeResponse } from \"node-fetch\";\nimport { Readable as NodeReadableStream } from \"stream\";\nimport { canUseAsyncIteratorSymbol } from \"./canUse\";\n\nexport function isNodeResponse(value: any): value is NodeResponse {\n return !!(value as NodeResponse).body;\n}\n\nexport function isReadableStream(value: any): value is ReadableStream {\n return !!(value as ReadableStream).getReader;\n}\n\nexport function isAsyncIterableIterator(\n value: any\n): value is AsyncIterableIterator {\n return !!(\n canUseAsyncIteratorSymbol &&\n (value as AsyncIterableIterator)[Symbol.asyncIterator]\n );\n}\n\nexport function isStreamableBlob(value: any): value is Blob {\n return !!(value as Blob).stream;\n}\n\nexport function isBlob(value: any): value is Blob {\n return !!(value as Blob).arrayBuffer;\n}\n\nexport function isNodeReadableStream(value: any): value is NodeReadableStream {\n return !!(value as NodeReadableStream).pipe;\n}\n","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/async.ts\n */\n\nexport default function asyncIterator(\n source: AsyncIterableIterator\n): AsyncIterableIterator {\n const iterator = source[Symbol.asyncIterator]();\n return {\n next(): Promise> {\n return iterator.next();\n },\n [Symbol.asyncIterator](): AsyncIterableIterator {\n return this;\n },\n };\n}\n","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/promise.ts\n */\n\nimport { canUseAsyncIteratorSymbol } from \"../../../utilities\";\n\ninterface PromiseIterator {\n next(): Promise>;\n [Symbol.asyncIterator]?(): AsyncIterator;\n}\n\nexport default function promiseIterator(\n promise: Promise\n): AsyncIterableIterator {\n let resolved = false;\n\n const iterator: PromiseIterator = {\n next(): Promise> {\n if (resolved)\n return Promise.resolve({\n value: undefined,\n done: true,\n });\n resolved = true;\n return new Promise(function (resolve, reject) {\n promise\n .then(function (value) {\n resolve({ value: value as unknown as T, done: false });\n })\n .catch(reject);\n });\n },\n };\n\n if (canUseAsyncIteratorSymbol) {\n iterator[Symbol.asyncIterator] = function (): AsyncIterator {\n return this;\n };\n }\n\n return iterator as AsyncIterableIterator;\n}\n","export type ServerError = Error & {\n response: Response;\n result: Record;\n statusCode: number;\n};\n\nexport const throwServerError = (\n response: Response,\n result: any,\n message: string\n) => {\n const error = new Error(message) as ServerError;\n error.name = 'ServerError';\n error.response = response;\n error.statusCode = response.status;\n error.result = result;\n throw error;\n};\n","import { responseIterator } from \"./responseIterator\";\nimport { Operation } from \"../core\";\nimport { throwServerError } from \"../utils\";\nimport { Observer } from \"../../utilities\";\n\nconst { hasOwnProperty } = Object.prototype;\n\nexport type ServerParseError = Error & {\n response: Response;\n statusCode: number;\n bodyText: string;\n};\n\nexport async function readMultipartBody>(\n response: Response,\n observer: Observer\n) {\n if (TextDecoder === undefined) {\n throw new Error(\n \"TextDecoder must be defined in the environment: please import a polyfill.\"\n );\n }\n const decoder = new TextDecoder(\"utf-8\");\n const contentType = response.headers?.get('content-type');\n const delimiter = \"boundary=\";\n\n // parse boundary value and ignore any subsequent name/value pairs after ;\n 
// https://www.rfc-editor.org/rfc/rfc9110.html#name-parameters\n // e.g. multipart/mixed;boundary=\"graphql\";deferSpec=20220824\n // if no boundary is specified, default to -\n const boundaryVal = contentType?.includes(delimiter)\n ? contentType\n ?.substring(contentType?.indexOf(delimiter) + delimiter.length)\n .replace(/['\"]/g, \"\")\n .replace(/\\;(.*)/gm, \"\")\n .trim()\n : \"-\";\n\n let boundary = `--${boundaryVal}`;\n let buffer = \"\";\n const iterator = responseIterator(response);\n let running = true;\n\n while (running) {\n const { value, done } = await iterator.next();\n const chunk = typeof value === \"string\" ? value : decoder.decode(value);\n running = !done;\n buffer += chunk;\n let bi = buffer.indexOf(boundary);\n\n while (bi > -1) {\n let message: string;\n [message, buffer] = [\n buffer.slice(0, bi),\n buffer.slice(bi + boundary.length),\n ];\n if (message.trim()) {\n const i = message.indexOf(\"\\r\\n\\r\\n\");\n const headers = parseHeaders(message.slice(0, i));\n const contentType = headers[\"content-type\"];\n if (\n contentType &&\n contentType.toLowerCase().indexOf(\"application/json\") === -1\n ) {\n throw new Error(\"Unsupported patch content type: application/json is required.\");\n }\n const body = message.slice(i);\n\n try {\n const result = parseJsonBody(response, body.replace(\"\\r\\n\", \"\"));\n if (\n Object.keys(result).length > 1 ||\n \"data\" in result ||\n \"incremental\" in result ||\n \"errors\" in result\n ) {\n // for the last chunk with only `hasNext: false`,\n // we don't need to call observer.next as there is no data/errors\n observer.next?.(result);\n }\n } catch (err) {\n handleError(err, observer);\n }\n }\n bi = buffer.indexOf(boundary);\n }\n }\n observer.complete?.();\n}\n\nexport function parseHeaders(headerText: string): Record {\n const headersInit: Record = {};\n headerText.split(\"\\n\").forEach((line) => {\n const i = line.indexOf(\":\");\n if (i > -1) {\n // normalize headers to lowercase\n const name = line.slice(0, i).trim().toLowerCase();\n const value = line.slice(i + 1).trim();\n headersInit[name] = value;\n }\n });\n return headersInit;\n}\n\nexport function parseJsonBody(response: Response, bodyText: string): T {\n if (response.status >= 300) {\n // Network error\n const getResult = () => {\n try {\n return JSON.parse(bodyText);\n } catch (err) {\n return bodyText\n }\n }\n throwServerError(\n response,\n getResult(),\n `Response not successful: Received status code ${response.status}`,\n );\n }\n\n try {\n return JSON.parse(bodyText) as T;\n } catch (err) {\n const parseError = err as ServerParseError;\n parseError.name = \"ServerParseError\";\n parseError.response = response;\n parseError.statusCode = response.status;\n parseError.bodyText = bodyText;\n throw parseError;\n }\n}\n\nexport function handleError(err: any, observer: Observer) {\n if (err.name === \"AbortError\") return;\n // if it is a network error, BUT there is graphql result info fire\n // the next observer before calling error this gives apollo-client\n // (and react-apollo) the `graphqlErrors` and `networkErrors` to\n // pass to UI this should only happen if we *also* have data as\n // part of the response key per the spec\n if (err.result && err.result.errors && err.result.data) {\n // if we don't call next, the UI can only show networkError\n // because AC didn't get any graphqlErrors this is graphql\n // execution result info (i.e errors and possibly data) this is\n // because there is no formal spec how errors should translate to\n // http status 
codes. So an auth error (401) could have both data\n // from a public field, errors from a private field, and a status\n // of 401\n // {\n // user { // this will have errors\n // firstName\n // }\n // products { // this is public so will have data\n // cost\n // }\n // }\n //\n // the result of above *could* look like this:\n // {\n // data: { products: [{ cost: \"$10\" }] },\n // errors: [{\n // message: 'your session has timed out',\n // path: []\n // }]\n // }\n // status code of above would be a 401\n // in the UI you want to show data where you can, errors as data where you can\n // and use correct http status codes\n observer.next?.(err.result);\n }\n\n observer.error?.(err);\n}\n\nexport function readJsonBody>(\n response: Response,\n operation: Operation,\n observer: Observer\n) {\n parseAndCheckHttpResponse(operation)(response)\n .then((result) => {\n observer.next?.(result);\n observer.complete?.();\n })\n .catch((err) => handleError(err, observer));\n}\n\nexport function parseAndCheckHttpResponse(operations: Operation | Operation[]) {\n return (response: Response) =>\n response\n .text()\n .then((bodyText) => parseJsonBody(response, bodyText))\n .then((result: any) => {\n if (response.status >= 300) {\n // Network error\n throwServerError(\n response,\n result,\n `Response not successful: Received status code ${response.status}`\n );\n }\n if (\n !Array.isArray(result) &&\n !hasOwnProperty.call(result, \"data\") &&\n !hasOwnProperty.call(result, \"errors\")\n ) {\n // Data error\n throwServerError(\n response,\n result,\n `Server response was missing for query '${\n Array.isArray(operations)\n ? operations.map((op) => op.operationName)\n : operations.operationName\n }'.`\n );\n }\n return result;\n });\n}\n","import { InvariantError } from '../../utilities/globals';\n\nexport const checkFetcher = (fetcher: WindowOrWorkerGlobalScope['fetch'] | undefined) => {\n if (!fetcher && typeof fetch === 'undefined') {\n throw new InvariantError(`\n\"fetch\" has not been found globally and no fetcher has been \\\nconfigured. To fix this, install a fetch package (like \\\nhttps://www.npmjs.com/package/cross-fetch), instantiate the \\\nfetcher, and pass it into your HttpLink constructor. For example:\n\nimport fetch from 'cross-fetch';\nimport { ApolloClient, HttpLink } from '@apollo/client';\nconst client = new ApolloClient({\n link: new HttpLink({ uri: '/graphql', fetch })\n});\n `);\n }\n};\n","import { ASTNode, print } from 'graphql';\n\nimport { Operation } from '../core';\n\nexport interface Printer {\n (node: ASTNode, originalPrint: typeof print): string\n};\n\nexport interface UriFunction {\n (operation: Operation): string;\n}\n\nexport interface Body {\n query?: string;\n operationName?: string;\n variables?: Record;\n extensions?: Record;\n}\n\nexport interface HttpOptions {\n /**\n * The URI to use when fetching operations.\n *\n * Defaults to '/graphql'.\n */\n uri?: string | UriFunction;\n\n /**\n * Passes the extensions field to your graphql server.\n *\n * Defaults to false.\n */\n includeExtensions?: boolean;\n\n /**\n * A `fetch`-compatible API to use when making requests.\n */\n fetch?: WindowOrWorkerGlobalScope['fetch'];\n\n /**\n * An object representing values to be sent as headers on the request.\n */\n headers?: any;\n\n /**\n * If set to true, header names won't be automatically normalized to \n * lowercase. 
This allows for non-http-spec-compliant servers that might \n * expect capitalized header names.\n */\n preserveHeaderCase?: boolean;\n\n /**\n * The credentials policy you want to use for the fetch call.\n */\n credentials?: string;\n\n /**\n * Any overrides of the fetch options argument to pass to the fetch call.\n */\n fetchOptions?: any;\n\n /**\n * If set to true, use the HTTP GET method for query operations. Mutations\n * will still use the method specified in fetchOptions.method (which defaults\n * to POST).\n */\n useGETForQueries?: boolean;\n\n /**\n * If set to true, the default behavior of stripping unused variables\n * from the request will be disabled.\n *\n * Unused variables are likely to trigger server-side validation errors,\n * per https://spec.graphql.org/draft/#sec-All-Variables-Used, but this\n * includeUnusedVariables option can be useful if your server deviates\n * from the GraphQL specification by not strictly enforcing that rule.\n */\n includeUnusedVariables?: boolean;\n /**\n * A function to substitute for the default query print function. Can be\n * used to apply changes to the results of the print function.\n */\n print?: Printer;\n}\n\nexport interface HttpQueryOptions {\n includeQuery?: boolean;\n includeExtensions?: boolean;\n preserveHeaderCase?: boolean;\n}\n\nexport interface HttpConfig {\n http?: HttpQueryOptions;\n options?: any;\n headers?: any;\n credentials?: any;\n}\n\nconst defaultHttpOptions: HttpQueryOptions = {\n includeQuery: true,\n includeExtensions: false,\n preserveHeaderCase: false,\n};\n\nconst defaultHeaders = {\n // headers are case insensitive (https://stackoverflow.com/a/5259004)\n accept: '*/*',\n // The content-type header describes the type of the body of the request, and\n // so it typically only is sent with requests that actually have bodies. One\n // could imagine that Apollo Client would remove this header when constructing\n // a GET request (which has no body), but we historically have not done that.\n // This means that browsers will preflight all Apollo Client requests (even\n // GET requests). Apollo Server's CSRF prevention feature (introduced in\n // AS3.7) takes advantage of this fact and does not block requests with this\n // header. If you want to drop this header from GET requests, then you should\n // probably replace it with a `apollo-require-preflight` header, or servers\n // with CSRF prevention enabled might block your GET request. 
See\n // https://www.apollographql.com/docs/apollo-server/security/cors/#preventing-cross-site-request-forgery-csrf\n // for more details.\n 'content-type': 'application/json',\n};\n\nconst defaultOptions = {\n method: 'POST',\n};\n\nexport const fallbackHttpConfig = {\n http: defaultHttpOptions,\n headers: defaultHeaders,\n options: defaultOptions,\n};\n\nexport const defaultPrinter: Printer = (ast, printer) => printer(ast);\n\nexport function selectHttpOptionsAndBody(\n operation: Operation,\n fallbackConfig: HttpConfig,\n ...configs: Array\n) {\n configs.unshift(fallbackConfig);\n return selectHttpOptionsAndBodyInternal(\n operation,\n defaultPrinter,\n ...configs,\n );\n}\n\nexport function selectHttpOptionsAndBodyInternal(\n operation: Operation,\n printer: Printer,\n ...configs: HttpConfig[]\n) {\n let options = {} as HttpConfig & Record;\n let http = {} as HttpQueryOptions;\n\n configs.forEach(config => {\n options = {\n ...options,\n ...config.options,\n headers: {\n ...options.headers,\n ...config.headers,\n }\n };\n\n if (config.credentials) {\n options.credentials = config.credentials;\n }\n\n http = {\n ...http,\n ...config.http,\n };\n });\n\n options.headers = removeDuplicateHeaders(options.headers, http.preserveHeaderCase);\n\n //The body depends on the http options\n const { operationName, extensions, variables, query } = operation;\n const body: Body = { operationName, variables };\n\n if (http.includeExtensions) (body as any).extensions = extensions;\n\n // not sending the query (i.e persisted queries)\n if (http.includeQuery) (body as any).query = printer(query, print);\n\n return {\n options,\n body,\n };\n};\n\n// Remove potential duplicate header names, preserving last (by insertion order).\n// This is done to prevent unintentionally duplicating a header instead of \n// overwriting it (See #8447 and #8449).\nfunction removeDuplicateHeaders(\n headers: Record,\n preserveHeaderCase: boolean | undefined\n): typeof headers {\n\n // If we're not preserving the case, just remove duplicates w/ normalization.\n if (!preserveHeaderCase) {\n const normalizedHeaders = Object.create(null);\n Object.keys(Object(headers)).forEach(name => {\n normalizedHeaders[name.toLowerCase()] = headers[name];\n });\n return normalizedHeaders; \n }\n\n // If we are preserving the case, remove duplicates w/ normalization,\n // preserving the original name.\n // This allows for non-http-spec-compliant servers that expect intentionally \n // capitalized header names (See #6741).\n const headerData = Object.create(null);\n Object.keys(Object(headers)).forEach(name => {\n headerData[name.toLowerCase()] = { originalName: name, value: headers[name] }\n });\n\n const normalizedHeaders = Object.create(null);\n Object.keys(headerData).forEach(name => {\n normalizedHeaders[headerData[name].originalName] = headerData[name].value;\n });\n return normalizedHeaders;\n}\n","import { Observable } from '../../utilities';\n\nexport function fromError(errorValue: any): Observable {\n return new Observable(observer => {\n observer.error(errorValue);\n });\n}\n","import '../../utilities/globals';\n\nimport { visit, DefinitionNode, VariableDefinitionNode } from 'graphql';\n\nimport { ApolloLink } from '../core';\nimport { Observable, hasDirectives } from '../../utilities';\nimport { serializeFetchParameter } from './serializeFetchParameter';\nimport { selectURI } from './selectURI';\nimport {\n handleError,\n readMultipartBody,\n readJsonBody\n} from './parseAndCheckHttpResponse';\nimport { checkFetcher } from 
'./checkFetcher';\nimport {\n selectHttpOptionsAndBodyInternal,\n defaultPrinter,\n fallbackHttpConfig,\n HttpOptions\n} from './selectHttpOptionsAndBody';\nimport { createSignalIfSupported } from './createSignalIfSupported';\nimport { rewriteURIForGET } from './rewriteURIForGET';\nimport { fromError } from '../utils';\nimport { maybe } from '../../utilities';\n\nconst backupFetch = maybe(() => fetch);\n\nexport const createHttpLink = (linkOptions: HttpOptions = {}) => {\n let {\n uri = '/graphql',\n // use default global fetch if nothing passed in\n fetch: preferredFetch,\n print = defaultPrinter,\n includeExtensions,\n preserveHeaderCase,\n useGETForQueries,\n includeUnusedVariables = false,\n ...requestOptions\n } = linkOptions;\n\n if (__DEV__) {\n // Make sure at least one of preferredFetch, window.fetch, or backupFetch is\n // defined, so requests won't fail at runtime.\n checkFetcher(preferredFetch || backupFetch);\n }\n\n const linkConfig = {\n http: { includeExtensions, preserveHeaderCase },\n options: requestOptions.fetchOptions,\n credentials: requestOptions.credentials,\n headers: requestOptions.headers,\n };\n\n return new ApolloLink(operation => {\n let chosenURI = selectURI(operation, uri);\n\n const context = operation.getContext();\n\n // `apollographql-client-*` headers are automatically set if a\n // `clientAwareness` object is found in the context. These headers are\n // set first, followed by the rest of the headers pulled from\n // `context.headers`. If desired, `apollographql-client-*` headers set by\n // the `clientAwareness` object can be overridden by\n // `apollographql-client-*` headers set in `context.headers`.\n const clientAwarenessHeaders: {\n 'apollographql-client-name'?: string;\n 'apollographql-client-version'?: string;\n } = {};\n\n if (context.clientAwareness) {\n const { name, version } = context.clientAwareness;\n if (name) {\n clientAwarenessHeaders['apollographql-client-name'] = name;\n }\n if (version) {\n clientAwarenessHeaders['apollographql-client-version'] = version;\n }\n }\n\n const contextHeaders = { ...clientAwarenessHeaders, ...context.headers };\n\n const contextConfig = {\n http: context.http,\n options: context.fetchOptions,\n credentials: context.credentials,\n headers: contextHeaders,\n };\n\n //uses fallback, link, and then context to build options\n const { options, body } = selectHttpOptionsAndBodyInternal(\n operation,\n print,\n fallbackHttpConfig,\n linkConfig,\n contextConfig,\n );\n\n if (body.variables && !includeUnusedVariables) {\n const unusedNames = new Set(Object.keys(body.variables));\n visit(operation.query, {\n Variable(node, _key, parent) {\n // A variable type definition at the top level of a query is not\n // enough to silence server-side errors about the variable being\n // unused, so variable definitions do not count as usage.\n // https://spec.graphql.org/draft/#sec-All-Variables-Used\n if (parent && (parent as VariableDefinitionNode).kind !== 'VariableDefinition') {\n unusedNames.delete(node.name.value);\n }\n },\n });\n if (unusedNames.size) {\n // Make a shallow copy of body.variables (with keys in the same\n // order) and then delete unused variables from the copy.\n body.variables = { ...body.variables };\n unusedNames.forEach(name => {\n delete body.variables![name];\n });\n }\n }\n\n let controller: any;\n if (!(options as any).signal) {\n const { controller: _controller, signal } = createSignalIfSupported();\n controller = _controller;\n if (controller) (options as any).signal = signal;\n }\n\n // If 
requested, set method to GET if there are no mutations.\n const definitionIsMutation = (d: DefinitionNode) => {\n return d.kind === 'OperationDefinition' && d.operation === 'mutation';\n };\n if (\n useGETForQueries &&\n !operation.query.definitions.some(definitionIsMutation)\n ) {\n options.method = 'GET';\n }\n\n // does not match custom directives beginning with @defer\n if (hasDirectives(['defer'], operation.query)) {\n options.headers.accept = \"multipart/mixed; deferSpec=20220824, application/json\";\n }\n\n if (options.method === 'GET') {\n const { newURI, parseError } = rewriteURIForGET(chosenURI, body);\n if (parseError) {\n return fromError(parseError);\n }\n chosenURI = newURI;\n } else {\n try {\n (options as any).body = serializeFetchParameter(body, 'Payload');\n } catch (parseError) {\n return fromError(parseError);\n }\n }\n\n return new Observable(observer => {\n // Prefer linkOptions.fetch (preferredFetch) if provided, and otherwise\n // fall back to the *current* global window.fetch function (see issue\n // #7832), or (if all else fails) the backupFetch function we saved when\n // this module was first evaluated. This last option protects against the\n // removal of window.fetch, which is unlikely but not impossible.\n const currentFetch = preferredFetch || maybe(() => fetch) || backupFetch;\n\n currentFetch!(chosenURI, options)\n .then(response => {\n operation.setContext({ response });\n const ctype = response.headers?.get('content-type');\n\n if (ctype !== null && /^multipart\\/mixed/i.test(ctype)) {\n return readMultipartBody(response, observer);\n } else {\n return readJsonBody(response, operation, observer);\n }\n })\n .catch(err => handleError(err, observer));\n\n return () => {\n // XXX support canceling this request\n // https://developers.google.com/web/updates/2017/09/abortable-fetch\n if (controller) controller.abort();\n };\n });\n });\n};\n","import { Operation } from '../core';\n\nexport const selectURI = (\n operation: Operation,\n fallbackURI?: string | ((operation: Operation) => string),\n) => {\n const context = operation.getContext();\n const contextURI = context.uri;\n\n if (contextURI) {\n return contextURI;\n } else if (typeof fallbackURI === 'function') {\n return fallbackURI(operation);\n } else {\n return (fallbackURI as string) || '/graphql';\n }\n};\n","export const createSignalIfSupported = () => {\n if (typeof AbortController === 'undefined')\n return { controller: false, signal: false };\n\n const controller = new AbortController();\n const signal = controller.signal;\n return { controller, signal };\n};\n","import { serializeFetchParameter } from './serializeFetchParameter';\nimport { Body } from './selectHttpOptionsAndBody';\n\n// For GET operations, returns the given URI rewritten with parameters, or a\n// parse error.\nexport function rewriteURIForGET(chosenURI: string, body: Body) {\n // Implement the standard HTTP GET serialization, plus 'extensions'. 
Note\n // the extra level of JSON serialization!\n const queryParams: string[] = [];\n const addQueryParam = (key: string, value: string) => {\n queryParams.push(`${key}=${encodeURIComponent(value)}`);\n };\n\n if ('query' in body) {\n addQueryParam('query', body.query!);\n }\n if (body.operationName) {\n addQueryParam('operationName', body.operationName);\n }\n if (body.variables) {\n let serializedVariables;\n try {\n serializedVariables = serializeFetchParameter(\n body.variables,\n 'Variables map',\n );\n } catch (parseError) {\n return { parseError };\n }\n addQueryParam('variables', serializedVariables);\n }\n if (body.extensions) {\n let serializedExtensions;\n try {\n serializedExtensions = serializeFetchParameter(\n body.extensions,\n 'Extensions map',\n );\n } catch (parseError) {\n return { parseError };\n }\n addQueryParam('extensions', serializedExtensions);\n }\n\n // Reconstruct the URI with added query params.\n // XXX This assumes that the URI is well-formed and that it doesn't\n // already contain any of these query params. We could instead use the\n // URL API and take a polyfill (whatwg-url@6) for older browsers that\n // don't support URLSearchParams. Note that some browsers (and\n // versions of whatwg-url) support URL but not URLSearchParams!\n let fragment = '',\n preFragment = chosenURI;\n const fragmentStart = chosenURI.indexOf('#');\n if (fragmentStart !== -1) {\n fragment = chosenURI.substr(fragmentStart);\n preFragment = chosenURI.substr(0, fragmentStart);\n }\n const queryParamsPrefix = preFragment.indexOf('?') === -1 ? '?' : '&';\n const newURI =\n preFragment + queryParamsPrefix + queryParams.join('&') + fragment;\n return { newURI };\n}\n","import { ApolloLink, RequestHandler } from '../core';\nimport { HttpOptions } from './selectHttpOptionsAndBody';\nimport { createHttpLink } from './createHttpLink';\n\nexport class HttpLink extends ApolloLink {\n public requester: RequestHandler;\n constructor(public options: HttpOptions = {}) {\n super(createHttpLink(options).request);\n }\n}\n","import { ApolloLink } from './ApolloLink';\n\nexport const execute = ApolloLink.execute;\n","import { Observable, Observer } from \"./Observable\";\n\n// Like Observable.prototype.map, except that the mapping function can\n// optionally return a Promise (or be async).\nexport function asyncMap(\n observable: Observable,\n mapFn: (value: V) => R | PromiseLike,\n catchFn?: (error: any) => R | PromiseLike,\n): Observable {\n return new Observable(observer => {\n const { next, error, complete } = observer;\n let activeCallbackCount = 0;\n let completed = false;\n let promiseQueue = {\n // Normally we would initialize promiseQueue to Promise.resolve(), but\n // in this case, for backwards compatibility, we need to be careful to\n // invoke the first callback synchronously.\n then(callback: () => any) {\n return new Promise(resolve => resolve(callback()));\n },\n } as Promise;\n\n function makeCallback(\n examiner: typeof mapFn | typeof catchFn,\n delegate: typeof next | typeof error,\n ): (arg: any) => void {\n if (examiner) {\n return arg => {\n ++activeCallbackCount;\n const both = () => examiner(arg);\n promiseQueue = promiseQueue.then(both, both).then(\n result => {\n --activeCallbackCount;\n next && next.call(observer, result);\n if (completed) {\n handler.complete!();\n }\n },\n error => {\n --activeCallbackCount;\n throw error;\n },\n ).catch(caught => {\n error && error.call(observer, caught);\n });\n };\n } else {\n return arg => delegate && delegate.call(observer, arg);\n 
}\n }\n\n const handler: Observer = {\n next: makeCallback(mapFn, next),\n error: makeCallback(catchFn, error),\n complete() {\n completed = true;\n if (!activeCallbackCount) {\n complete && complete.call(observer);\n }\n },\n };\n\n const sub = observable.subscribe(handler);\n return () => sub.unsubscribe();\n });\n}\n","import { ExecutionResult } from 'graphql';\n\nexport function graphQLResultHasError(result: ExecutionResult): boolean {\n return (result.errors && result.errors.length > 0) || false;\n}\n","import { Observer } from \"./Observable\";\n\nexport function iterateObserversSafely(\n observers: Set>,\n method: keyof Observer,\n argument?: A,\n) {\n // In case observers is modified during iteration, we need to commit to the\n // original elements, which also provides an opportunity to filter them down\n // to just the observers with the given method.\n const observersWithMethod: Observer[] = [];\n observers.forEach(obs => obs[method] && observersWithMethod.push(obs));\n observersWithMethod.forEach(obs => (obs as any)[method](argument));\n}\n","import { Observable } from \"./Observable\";\nimport { canUseSymbol } from \"../common/canUse\";\n\n// Generic implementations of Observable.prototype methods like map and\n// filter need to know how to create a new Observable from an Observable\n// subclass (like Concast or ObservableQuery). Those methods assume\n// (perhaps unwisely?) that they can call the subtype's constructor with a\n// Subscriber function, even though the subclass constructor might expect\n// different parameters. Defining this static Symbol.species property on\n// the subclass is a hint to generic Observable code to use the default\n// constructor instead of trying to do `new Subclass(observer => ...)`.\nexport function fixObservableSubclass<\n S extends new (...args: any[]) => Observable,\n>(subclass: S): S {\n function set(key: symbol | string) {\n // Object.defineProperty is necessary because the Symbol.species\n // property is a getter by default in modern JS environments, so we\n // can't assign to it with a normal assignment expression.\n Object.defineProperty(subclass, key, { value: Observable });\n }\n if (canUseSymbol && Symbol.species) {\n set(Symbol.species);\n }\n // The \"@@species\" string is used as a fake Symbol.species value in some\n // polyfill systems (including the SymbolSpecies variable used by\n // zen-observable), so we should set it as well, to be safe.\n set(\"@@species\");\n return subclass;\n}\n","import { Observable, Observer, ObservableSubscription, Subscriber } from \"./Observable\";\nimport { iterateObserversSafely } from \"./iteration\";\nimport { fixObservableSubclass } from \"./subclassing\";\n\ntype MaybeAsync = T | PromiseLike;\n\nfunction isPromiseLike(value: MaybeAsync): value is PromiseLike {\n return value && typeof (value as any).then === \"function\";\n}\n\n// Any individual Source can be an Observable or a promise for one.\ntype Source = MaybeAsync>;\n\nexport type ConcastSourcesIterable = Iterable>;\nexport type ConcastSourcesArray = Array>;\n\n// A Concast observable concatenates the given sources into a single\n// non-overlapping sequence of Ts, automatically unwrapping any promises,\n// and broadcasts the T elements of that sequence to any number of\n// subscribers, all without creating a bunch of intermediary Observable\n// wrapper objects.\n//\n// Even though any number of observers can subscribe to the Concast, each\n// source observable is guaranteed to receive at most one subscribe call,\n// and the results are 
multicast to all observers.\n//\n// In addition to broadcasting every next/error message to this.observers,\n// the Concast stores the most recent message using this.latest, so any\n// new observers can immediately receive the latest message, even if it\n// was originally delivered in the past. This behavior means we can assume\n// every active observer in this.observers has received the same most\n// recent message.\n//\n// With the exception of this.latest replay, a Concast is a \"hot\"\n// observable in the sense that it does not replay past results from the\n// beginning of time for each new observer.\n//\n// Could we have used some existing RxJS class instead? Concast is\n// similar to a BehaviorSubject, because it is multicast and redelivers\n// the latest next/error message to new subscribers. Unlike Subject,\n// Concast does not expose an Observer interface (this.handlers is\n// intentionally private), since Concast gets its inputs from the\n// concatenated sources. If we ever switch to RxJS, there may be some\n// value in reusing their code, but for now we use zen-observable, which\n// does not contain any Subject implementations.\nexport class Concast extends Observable {\n // Active observers receiving broadcast messages. Thanks to this.latest,\n // we can assume all observers in this Set have received the same most\n // recent message, though possibly at different times in the past.\n private observers = new Set>();\n\n // This property starts off undefined to indicate the initial\n // subscription has not yet begun, then points to each source\n // subscription in turn, and finally becomes null after the sources have\n // been exhausted. After that, it stays null.\n private sub?: ObservableSubscription | null;\n\n // Not only can the individual elements of the iterable be promises, but\n // also the iterable itself can be wrapped in a promise.\n constructor(sources: MaybeAsync> | Subscriber) {\n super(observer => {\n this.addObserver(observer);\n return () => this.removeObserver(observer);\n });\n\n // Suppress rejection warnings for this.promise, since it's perfectly\n // acceptable to pay no attention to this.promise if you're consuming\n // the results through the normal observable API.\n this.promise.catch(_ => {});\n\n // If someone accidentally tries to create a Concast using a subscriber\n // function, recover by creating an Observable from that subscriber and\n // using it as the source.\n if (typeof sources === \"function\") {\n sources = [new Observable(sources)];\n }\n\n if (isPromiseLike(sources)) {\n sources.then(\n iterable => this.start(iterable),\n this.handlers.error,\n );\n } else {\n this.start(sources);\n }\n }\n\n // A consumable array of source observables, incrementally consumed\n // each time this.handlers.complete is called.\n private sources: Source[];\n\n private start(sources: ConcastSourcesIterable) {\n if (this.sub !== void 0) return;\n\n // In practice, sources is most often simply an Array of observables.\n // TODO Consider using sources[Symbol.iterator]() to take advantage\n // of the laziness of non-Array iterables.\n this.sources = Array.from(sources);\n\n // Calling this.handlers.complete() kicks off consumption of the first\n // source observable. 
It's tempting to do this step lazily in\n // addObserver, but this.promise can be accessed without calling\n // addObserver, so consumption needs to begin eagerly.\n this.handlers.complete();\n }\n\n private deliverLastMessage(observer: Observer) {\n if (this.latest) {\n const nextOrError = this.latest[0];\n const method = observer[nextOrError];\n if (method) {\n method.call(observer, this.latest[1]);\n }\n // If the subscription is already closed, and the last message was\n // a 'next' message, simulate delivery of the final 'complete'\n // message again.\n if (this.sub === null &&\n nextOrError === \"next\" &&\n observer.complete) {\n observer.complete();\n }\n }\n }\n\n public addObserver(observer: Observer) {\n if (!this.observers.has(observer)) {\n // Immediately deliver the most recent message, so we can always\n // be sure all observers have the latest information.\n this.deliverLastMessage(observer);\n this.observers.add(observer);\n }\n }\n\n public removeObserver(observer: Observer) {\n if (\n this.observers.delete(observer) &&\n this.observers.size < 1\n ) {\n // In case there are still any listeners in this.nextResultListeners, and\n // no error or completion has been broadcast yet, make sure those\n // observers have a chance to run and then remove themselves from\n // this.observers.\n this.handlers.complete();\n }\n }\n\n // Any Concast object can be trivially converted to a Promise, without\n // having to create a new wrapper Observable. This promise provides an\n // easy way to observe the final state of the Concast.\n private resolve: (result?: T | PromiseLike) => void;\n private reject: (reason: any) => void;\n public readonly promise = new Promise((resolve, reject) => {\n this.resolve = resolve;\n this.reject = reject;\n });\n\n // Name and argument of the most recently invoked observer method, used\n // to deliver latest results immediately to new observers.\n private latest?: [\"next\", T] | [\"error\", any];\n\n // Bound handler functions that can be reused for every internal\n // subscription.\n private handlers = {\n next: (result: T) => {\n if (this.sub !== null) {\n this.latest = [\"next\", result];\n this.notify(\"next\", result);\n iterateObserversSafely(this.observers, \"next\", result);\n }\n },\n\n error: (error: any) => {\n const { sub } = this;\n if (sub !== null) {\n // Delay unsubscribing from the underlying subscription slightly,\n // so that immediately subscribing another observer can keep the\n // subscription active.\n if (sub) setTimeout(() => sub.unsubscribe());\n this.sub = null;\n this.latest = [\"error\", error];\n this.reject(error);\n this.notify(\"error\", error);\n iterateObserversSafely(this.observers, \"error\", error);\n }\n },\n\n complete: () => {\n const { sub } = this;\n if (sub !== null) {\n const value = this.sources.shift();\n if (!value) {\n if (sub) setTimeout(() => sub.unsubscribe());\n this.sub = null;\n if (this.latest &&\n this.latest[0] === \"next\") {\n this.resolve(this.latest[1]);\n } else {\n this.resolve();\n }\n this.notify(\"complete\");\n // We do not store this.latest = [\"complete\"], because doing so\n // discards useful information about the previous next (or\n // error) message. 
Instead, if new observers subscribe after\n // this Concast has completed, they will receive the final\n // 'next' message (unless there was an error) immediately\n // followed by a 'complete' message (see addObserver).\n iterateObserversSafely(this.observers, \"complete\");\n } else if (isPromiseLike(value)) {\n value.then(obs => this.sub = obs.subscribe(this.handlers));\n } else {\n this.sub = value.subscribe(this.handlers);\n }\n }\n },\n };\n\n private nextResultListeners = new Set();\n\n private notify(\n method: Parameters[0],\n arg?: Parameters[1],\n ) {\n const { nextResultListeners } = this;\n if (nextResultListeners.size) {\n // Replacing this.nextResultListeners first ensures it does not grow while\n // we are iterating over it, potentially leading to infinite loops.\n this.nextResultListeners = new Set;\n nextResultListeners.forEach(listener => listener(method, arg));\n }\n }\n\n // We need a way to run callbacks just *before* the next result (or error or\n // completion) is delivered by this Concast, so we can be sure any code that\n // runs as a result of delivering that result/error observes the effects of\n // running the callback(s). It was tempting to reuse the Observer type instead\n // of introducing NextResultListener, but that messes with the sizing and\n // maintenance of this.observers, and ends up being more code overall.\n beforeNext(callback: NextResultListener) {\n let called = false;\n this.nextResultListeners.add((method, arg) => {\n if (!called) {\n called = true;\n callback(method, arg);\n }\n });\n }\n\n // A public way to abort observation and broadcast.\n public cancel = (reason: any) => {\n this.reject(reason);\n this.sources = [];\n this.handlers.complete();\n }\n}\n\ntype NextResultListener = (\n method: \"next\" | \"error\" | \"complete\",\n arg?: any,\n) => any;\n\n// Necessary because the Concast constructor has a different signature\n// than the Observable constructor.\nfixObservableSubclass(Concast);\n","import { invariant } from '../utilities/globals';\nimport { DocumentNode } from 'graphql';\nimport { equal } from '@wry/equality';\n\nimport { NetworkStatus, isNetworkRequestInFlight } from './networkStatus';\nimport {\n Concast,\n cloneDeep,\n compact,\n getOperationDefinition,\n Observable,\n Observer,\n ObservableSubscription,\n iterateObserversSafely,\n isNonEmptyArray,\n fixObservableSubclass,\n getQueryDefinition,\n} from '../utilities';\nimport { ApolloError } from '../errors';\nimport { QueryManager } from './QueryManager';\nimport {\n ApolloQueryResult,\n OperationVariables,\n TypedDocumentNode,\n} from './types';\nimport {\n WatchQueryOptions,\n FetchMoreQueryOptions,\n SubscribeToMoreOptions,\n NextFetchPolicyContext,\n} from './watchQueryOptions';\nimport { QueryInfo } from './QueryInfo';\nimport { MissingFieldError } from '../cache';\nimport { MissingTree } from '../cache/core/types/common';\n\nconst {\n assign,\n hasOwnProperty,\n} = Object;\n\nexport interface FetchMoreOptions<\n TData = any,\n TVariables = OperationVariables\n> {\n updateQuery?: (\n previousQueryResult: TData,\n options: {\n fetchMoreResult?: TData;\n variables?: TVariables;\n },\n ) => TData;\n}\n\nexport interface UpdateQueryOptions {\n variables?: TVariables;\n}\n\ninterface Last {\n result: ApolloQueryResult;\n variables?: TVariables;\n error?: ApolloError;\n}\n\nexport class ObservableQuery<\n TData = any,\n TVariables = OperationVariables\n> extends Observable> {\n public readonly options: WatchQueryOptions;\n public readonly queryId: string;\n public 
readonly queryName?: string;\n\n public get query(): TypedDocumentNode {\n // This transform is heavily cached, so it should not be expensive to\n // transform the same this.options.query document repeatedly.\n return this.queryManager.transform(this.options.query).document;\n }\n\n // Computed shorthand for this.options.variables, preserved for\n // backwards compatibility.\n public get variables(): TVariables | undefined {\n return this.options.variables;\n }\n\n private isTornDown: boolean;\n private queryManager: QueryManager;\n private observers = new Set>>();\n private subscriptions = new Set();\n\n private last?: Last;\n\n private queryInfo: QueryInfo;\n\n // When this.concast is defined, this.observer is the Observer currently\n // subscribed to that Concast.\n private concast?: Concast>;\n private observer?: Observer>;\n\n private pollingInfo?: {\n interval: number;\n timeout: ReturnType;\n };\n\n constructor({\n queryManager,\n queryInfo,\n options,\n }: {\n queryManager: QueryManager;\n queryInfo: QueryInfo;\n options: WatchQueryOptions;\n }) {\n super((observer: Observer>) => {\n // Zen Observable has its own error function, so in order to log correctly\n // we need to provide a custom error callback.\n try {\n var subObserver = (observer as any)._subscription._observer;\n if (subObserver && !subObserver.error) {\n subObserver.error = defaultSubscriptionObserverErrorCallback;\n }\n } catch {}\n\n const first = !this.observers.size;\n this.observers.add(observer);\n\n // Deliver most recent error or result.\n const last = this.last;\n if (last && last.error) {\n observer.error && observer.error(last.error);\n } else if (last && last.result) {\n observer.next && observer.next(last.result);\n }\n\n // Initiate observation of this query if it hasn't been reported to\n // the QueryManager yet.\n if (first) {\n // Blindly catching here prevents unhandled promise rejections,\n // and is safe because the ObservableQuery handles this error with\n // this.observer.error, so we're not just swallowing the error by\n // ignoring it here.\n this.reobserve().catch(() => {});\n }\n\n return () => {\n if (this.observers.delete(observer) && !this.observers.size) {\n this.tearDownQuery();\n }\n };\n });\n\n // related classes\n this.queryInfo = queryInfo;\n this.queryManager = queryManager;\n\n // active state\n this.isTornDown = false;\n\n const {\n watchQuery: {\n fetchPolicy: defaultFetchPolicy = \"cache-first\",\n } = {},\n } = queryManager.defaultOptions;\n\n const {\n fetchPolicy = defaultFetchPolicy,\n initialFetchPolicy = (\n // Make sure we don't store \"standby\" as the initialFetchPolicy.\n fetchPolicy === \"standby\" ? defaultFetchPolicy : fetchPolicy\n ),\n } = options;\n\n this.options = {\n ...options,\n\n // Remember the initial options.fetchPolicy so we can revert back to this\n // policy when variables change. 
This information can also be specified\n // (or overridden) by providing options.initialFetchPolicy explicitly.\n initialFetchPolicy,\n\n // This ensures this.options.fetchPolicy always has a string value, in\n // case options.fetchPolicy was not provided.\n fetchPolicy,\n };\n\n this.queryId = queryInfo.queryId || queryManager.generateQueryId();\n\n const opDef = getOperationDefinition(this.query);\n this.queryName = opDef && opDef.name && opDef.name.value;\n }\n\n public result(): Promise> {\n return new Promise((resolve, reject) => {\n // TODO: this code doesn’t actually make sense insofar as the observer\n // will never exist in this.observers due how zen-observable wraps observables.\n // https://github.com/zenparsing/zen-observable/blob/master/src/Observable.js#L169\n const observer: Observer> = {\n next: (result: ApolloQueryResult) => {\n resolve(result);\n\n // Stop the query within the QueryManager if we can before\n // this function returns.\n //\n // We do this in order to prevent observers piling up within\n // the QueryManager. Notice that we only fully unsubscribe\n // from the subscription in a setTimeout(..., 0) call. This call can\n // actually be handled by the browser at a much later time. If queries\n // are fired in the meantime, observers that should have been removed\n // from the QueryManager will continue to fire, causing an unnecessary\n // performance hit.\n this.observers.delete(observer);\n if (!this.observers.size) {\n this.queryManager.removeQuery(this.queryId);\n }\n\n setTimeout(() => {\n subscription.unsubscribe();\n }, 0);\n },\n error: reject,\n };\n const subscription = this.subscribe(observer);\n });\n }\n\n public getCurrentResult(saveAsLastResult = true): ApolloQueryResult {\n // Use the last result as long as the variables match this.variables.\n const lastResult = this.getLastResult(true);\n\n const networkStatus =\n this.queryInfo.networkStatus ||\n (lastResult && lastResult.networkStatus) ||\n NetworkStatus.ready;\n\n const result = {\n ...lastResult,\n loading: isNetworkRequestInFlight(networkStatus),\n networkStatus,\n } as ApolloQueryResult;\n\n const { fetchPolicy = \"cache-first\" } = this.options;\n if (\n // These fetch policies should never deliver data from the cache, unless\n // redelivering a previously delivered result.\n fetchPolicy === 'network-only' ||\n fetchPolicy === 'no-cache' ||\n fetchPolicy === 'standby' ||\n // If this.options.query has @client(always: true) fields, we cannot\n // trust diff.result, since it was read from the cache without running\n // local resolvers (and it's too late to run resolvers now, since we must\n // return a result synchronously).\n this.queryManager.transform(this.options.query).hasForcedResolvers\n ) {\n // Fall through.\n } else {\n const diff = this.queryInfo.getDiff();\n\n if (diff.complete || this.options.returnPartialData) {\n result.data = diff.result;\n }\n\n if (equal(result.data, {})) {\n result.data = void 0 as any;\n }\n\n if (diff.complete) {\n // Similar to setting result.partial to false, but taking advantage of the\n // falsiness of missing fields.\n delete result.partial;\n\n // If the diff is complete, and we're using a FetchPolicy that\n // terminates after a complete cache read, we can assume the next result\n // we receive will have NetworkStatus.ready and !loading.\n if (\n diff.complete &&\n result.networkStatus === NetworkStatus.loading &&\n (fetchPolicy === 'cache-first' ||\n fetchPolicy === 'cache-only')\n ) {\n result.networkStatus = NetworkStatus.ready;\n 
result.loading = false;\n }\n } else {\n result.partial = true;\n }\n\n if (\n __DEV__ &&\n !diff.complete &&\n !this.options.partialRefetch &&\n !result.loading &&\n !result.data &&\n !result.error\n ) {\n logMissingFieldErrors(diff.missing);\n }\n }\n\n if (saveAsLastResult) {\n this.updateLastResult(result);\n }\n\n return result;\n }\n\n // Compares newResult to the snapshot we took of this.lastResult when it was\n // first received.\n public isDifferentFromLastResult(\n newResult: ApolloQueryResult,\n variables?: TVariables\n ) {\n return (\n !this.last ||\n !equal(this.last.result, newResult) ||\n (variables && !equal(this.last.variables, variables))\n );\n }\n\n private getLast>(\n key: K,\n variablesMustMatch?: boolean,\n ) {\n const last = this.last;\n if (\n last &&\n last[key] &&\n (!variablesMustMatch || equal(last.variables, this.variables))\n ) {\n return last[key];\n }\n }\n\n public getLastResult(variablesMustMatch?: boolean): ApolloQueryResult | undefined {\n return this.getLast(\"result\", variablesMustMatch);\n }\n\n public getLastError(variablesMustMatch?: boolean): ApolloError | undefined {\n return this.getLast(\"error\", variablesMustMatch);\n }\n\n public resetLastResults(): void {\n delete this.last;\n this.isTornDown = false;\n }\n\n public resetQueryStoreErrors() {\n this.queryManager.resetErrors(this.queryId);\n }\n\n /**\n * Update the variables of this observable query, and fetch the new results.\n * This method should be preferred over `setVariables` in most use cases.\n *\n * @param variables: The new set of variables. If there are missing variables,\n * the previous values of those variables will be used.\n */\n public refetch(variables?: Partial): Promise> {\n const reobserveOptions: Partial> = {\n // Always disable polling for refetches.\n pollInterval: 0,\n };\n\n // Unless the provided fetchPolicy always consults the network\n // (no-cache, network-only, or cache-and-network), override it with\n // network-only to force the refetch for this fetchQuery call.\n const { fetchPolicy } = this.options;\n if (fetchPolicy === 'cache-and-network') {\n reobserveOptions.fetchPolicy = fetchPolicy;\n } else if (fetchPolicy === 'no-cache') {\n reobserveOptions.fetchPolicy = 'no-cache';\n } else {\n reobserveOptions.fetchPolicy = 'network-only';\n }\n\n if (__DEV__ && variables && hasOwnProperty.call(variables, \"variables\")) {\n const queryDef = getQueryDefinition(this.query);\n const vars = queryDef.variableDefinitions;\n if (!vars || !vars.some(v => v.variable.name.value === \"variables\")) {\n invariant.warn(`Called refetch(${\n JSON.stringify(variables)\n }) for query ${\n queryDef.name?.value || JSON.stringify(queryDef)\n }, which does not declare a $variables variable.\nDid you mean to call refetch(variables) instead of refetch({ variables })?`);\n }\n }\n\n if (variables && !equal(this.options.variables, variables)) {\n // Update the existing options with new variables\n reobserveOptions.variables = this.options.variables = {\n ...this.options.variables,\n ...variables,\n } as TVariables;\n }\n\n this.queryInfo.resetLastWrite();\n return this.reobserve(reobserveOptions, NetworkStatus.refetch);\n }\n\n public fetchMore<\n TFetchData = TData,\n TFetchVars = TVariables,\n >(fetchMoreOptions: FetchMoreQueryOptions & {\n updateQuery?: (\n previousQueryResult: TData,\n options: {\n fetchMoreResult: TFetchData;\n variables: TFetchVars;\n },\n ) => TData;\n }): Promise> {\n const combinedOptions = {\n ...(fetchMoreOptions.query ? 
fetchMoreOptions : {\n ...this.options,\n query: this.query,\n ...fetchMoreOptions,\n variables: {\n ...this.options.variables,\n ...fetchMoreOptions.variables,\n },\n }),\n // The fetchMore request goes immediately to the network and does\n // not automatically write its result to the cache (hence no-cache\n // instead of network-only), because we allow the caller of\n // fetchMore to provide an updateQuery callback that determines how\n // the data gets written to the cache.\n fetchPolicy: \"no-cache\",\n } as WatchQueryOptions;\n\n const qid = this.queryManager.generateQueryId();\n\n // Simulate a loading result for the original query with\n // result.networkStatus === NetworkStatus.fetchMore.\n const { queryInfo } = this;\n const originalNetworkStatus = queryInfo.networkStatus;\n queryInfo.networkStatus = NetworkStatus.fetchMore;\n if (combinedOptions.notifyOnNetworkStatusChange) {\n this.observe();\n }\n\n const updatedQuerySet = new Set();\n\n return this.queryManager.fetchQuery(\n qid,\n combinedOptions,\n NetworkStatus.fetchMore,\n ).then(fetchMoreResult => {\n this.queryManager.removeQuery(qid);\n\n if (queryInfo.networkStatus === NetworkStatus.fetchMore) {\n queryInfo.networkStatus = originalNetworkStatus;\n }\n\n // Performing this cache update inside a cache.batch transaction ensures\n // any affected cache.watch watchers are notified at most once about any\n // updates. Most watchers will be using the QueryInfo class, which\n // responds to notifications by calling reobserveCacheFirst to deliver\n // fetchMore cache results back to this ObservableQuery.\n this.queryManager.cache.batch({\n update: cache => {\n const { updateQuery } = fetchMoreOptions;\n if (updateQuery) {\n cache.updateQuery({\n query: this.query,\n variables: this.variables,\n returnPartialData: true,\n optimistic: false,\n }, previous => updateQuery(previous!, {\n fetchMoreResult: fetchMoreResult.data,\n variables: combinedOptions.variables as TFetchVars,\n }));\n\n } else {\n // If we're using a field policy instead of updateQuery, the only\n // thing we need to do is write the new data to the cache using\n // combinedOptions.variables (instead of this.variables, which is\n // what this.updateQuery uses, because it works by abusing the\n // original field value, keyed by the original variables).\n cache.writeQuery({\n query: combinedOptions.query,\n variables: combinedOptions.variables,\n data: fetchMoreResult.data,\n });\n }\n },\n\n onWatchUpdated: watch => {\n // Record the DocumentNode associated with any watched query whose\n // data were updated by the cache writes above.\n updatedQuerySet.add(watch.query);\n },\n });\n\n return fetchMoreResult;\n\n }).finally(() => {\n // In case the cache writes above did not generate a broadcast\n // notification (which would have been intercepted by onWatchUpdated),\n // likely because the written data were the same as what was already in\n // the cache, we still want fetchMore to deliver its final loading:false\n // result with the unchanged data.\n if (!updatedQuerySet.has(this.query)) {\n reobserveCacheFirst(this);\n }\n });\n }\n\n // XXX the subscription variables are separate from the query variables.\n // if you want to update subscription variables, right now you have to do that separately,\n // and you can only do it by stopping the subscription and then subscribing again with new variables.\n public subscribeToMore<\n TSubscriptionData = TData,\n TSubscriptionVariables = TVariables\n >(\n options: SubscribeToMoreOptions<\n TData,\n 
TSubscriptionVariables,\n TSubscriptionData\n >,\n ) {\n const subscription = this.queryManager\n .startGraphQLSubscription({\n query: options.document,\n variables: options.variables,\n context: options.context,\n })\n .subscribe({\n next: (subscriptionData: { data: TSubscriptionData }) => {\n const { updateQuery } = options;\n if (updateQuery) {\n this.updateQuery(\n (previous, { variables }) =>\n updateQuery(previous, {\n subscriptionData,\n variables,\n }),\n );\n }\n },\n error: (err: any) => {\n if (options.onError) {\n options.onError(err);\n return;\n }\n invariant.error('Unhandled GraphQL subscription error', err);\n },\n });\n\n this.subscriptions.add(subscription);\n\n return () => {\n if (this.subscriptions.delete(subscription)) {\n subscription.unsubscribe();\n }\n };\n }\n\n public setOptions(\n newOptions: Partial>,\n ): Promise> {\n return this.reobserve(newOptions);\n }\n\n /**\n * This is for *internal* use only. Most users should instead use `refetch`\n * in order to be properly notified of results even when they come from cache.\n *\n * Update the variables of this observable query, and fetch the new results\n * if they've changed. If you want to force new results, use `refetch`.\n *\n * Note: the `next` callback will *not* fire if the variables have not changed\n * or if the result is coming from cache.\n *\n * Note: the promise will return the old results immediately if the variables\n * have not changed.\n *\n * Note: the promise will return null immediately if the query is not active\n * (there are no subscribers).\n *\n * @private\n *\n * @param variables: The new set of variables. If there are missing variables,\n * the previous values of those variables will be used.\n */\n public setVariables(\n variables: TVariables,\n ): Promise | void> {\n if (equal(this.variables, variables)) {\n // If we have no observers, then we don't actually want to make a network\n // request. As soon as someone observes the query, the request will kick\n // off. For now, we just store any changes. (See #1077)\n return this.observers.size\n ? 
this.result()\n : Promise.resolve();\n }\n\n this.options.variables = variables;\n\n // See comment above\n if (!this.observers.size) {\n return Promise.resolve();\n }\n\n return this.reobserve({\n // Reset options.fetchPolicy to its original value.\n fetchPolicy: this.options.initialFetchPolicy,\n variables,\n }, NetworkStatus.setVariables);\n }\n\n public updateQuery(\n mapFn: (\n previousQueryResult: TData,\n options: Pick, \"variables\">,\n ) => TData,\n ): void {\n const { queryManager } = this;\n const { result } = queryManager.cache.diff({\n query: this.options.query,\n variables: this.variables,\n returnPartialData: true,\n optimistic: false,\n });\n\n const newResult = mapFn(result!, {\n variables: (this as any).variables,\n });\n\n if (newResult) {\n queryManager.cache.writeQuery({\n query: this.options.query,\n data: newResult,\n variables: this.variables,\n });\n\n queryManager.broadcastQueries();\n }\n }\n\n public startPolling(pollInterval: number) {\n this.options.pollInterval = pollInterval;\n this.updatePolling();\n }\n\n public stopPolling() {\n this.options.pollInterval = 0;\n this.updatePolling();\n }\n\n // Update options.fetchPolicy according to options.nextFetchPolicy.\n private applyNextFetchPolicy(\n reason: NextFetchPolicyContext[\"reason\"],\n // It's possible to use this method to apply options.nextFetchPolicy to\n // options.fetchPolicy even if options !== this.options, though that happens\n // most often when the options are temporary, used for only one request and\n // then thrown away, so nextFetchPolicy may not end up mattering.\n options: WatchQueryOptions,\n ) {\n if (options.nextFetchPolicy) {\n const {\n fetchPolicy = \"cache-first\",\n initialFetchPolicy = fetchPolicy,\n } = options;\n\n if (fetchPolicy === \"standby\") {\n // Do nothing, leaving options.fetchPolicy unchanged.\n } else if (typeof options.nextFetchPolicy === \"function\") {\n // When someone chooses \"cache-and-network\" or \"network-only\" as their\n // initial FetchPolicy, they often do not want future cache updates to\n // trigger unconditional network requests, which is what repeatedly\n // applying the \"cache-and-network\" or \"network-only\" policies would\n // seem to imply. Instead, when the cache reports an update after the\n // initial network request, it may be desirable for subsequent network\n // requests to be triggered only if the cache result is incomplete. 
To\n // that end, the options.nextFetchPolicy option provides an easy way to\n // update options.fetchPolicy after the initial network request, without\n // having to call observableQuery.setOptions.\n options.fetchPolicy = options.nextFetchPolicy(fetchPolicy, {\n reason,\n options,\n observable: this,\n initialFetchPolicy,\n });\n } else if (reason === \"variables-changed\") {\n options.fetchPolicy = initialFetchPolicy;\n } else {\n options.fetchPolicy = options.nextFetchPolicy;\n }\n }\n\n return options.fetchPolicy;\n }\n\n private fetch(\n options: WatchQueryOptions,\n newNetworkStatus?: NetworkStatus,\n ): Concast> {\n // TODO Make sure we update the networkStatus (and infer fetchVariables)\n // before actually committing to the fetch.\n this.queryManager.setObservableQuery(this);\n return this.queryManager.fetchQueryObservable(\n this.queryId,\n options,\n newNetworkStatus,\n );\n }\n\n // Turns polling on or off based on this.options.pollInterval.\n private updatePolling() {\n // Avoid polling in SSR mode\n if (this.queryManager.ssrMode) {\n return;\n }\n\n const {\n pollingInfo,\n options: {\n pollInterval,\n },\n } = this;\n\n if (!pollInterval) {\n if (pollingInfo) {\n clearTimeout(pollingInfo.timeout);\n delete this.pollingInfo;\n }\n return;\n }\n\n if (pollingInfo &&\n pollingInfo.interval === pollInterval) {\n return;\n }\n\n invariant(\n pollInterval,\n 'Attempted to start a polling query without a polling interval.',\n );\n\n const info = pollingInfo || (this.pollingInfo = {} as any);\n info.interval = pollInterval;\n\n const maybeFetch = () => {\n if (this.pollingInfo) {\n if (!isNetworkRequestInFlight(this.queryInfo.networkStatus)) {\n this.reobserve({\n // Most fetchPolicy options don't make sense to use in a polling context, as\n // users wouldn't want to be polling the cache directly. However, network-only and\n // no-cache are both useful for when the user wants to control whether or not the\n // polled results are written to the cache.\n fetchPolicy: this.options.initialFetchPolicy === 'no-cache' ? 'no-cache' : 'network-only',\n }, NetworkStatus.poll).then(poll, poll);\n } else {\n poll();\n }\n };\n };\n\n const poll = () => {\n const info = this.pollingInfo;\n if (info) {\n clearTimeout(info.timeout);\n info.timeout = setTimeout(maybeFetch, info.interval);\n }\n };\n\n poll();\n }\n\n private updateLastResult(\n newResult: ApolloQueryResult,\n variables = this.variables,\n ) {\n this.last = {\n ...this.last,\n result: this.queryManager.assumeImmutableResults\n ? 
newResult\n : cloneDeep(newResult),\n variables,\n };\n if (!isNonEmptyArray(newResult.errors)) {\n delete this.last.error;\n }\n return this.last;\n }\n\n public reobserve(\n newOptions?: Partial>,\n newNetworkStatus?: NetworkStatus,\n ): Promise> {\n this.isTornDown = false;\n\n const useDisposableConcast =\n // Refetching uses a disposable Concast to allow refetches using different\n // options/variables, without permanently altering the options of the\n // original ObservableQuery.\n newNetworkStatus === NetworkStatus.refetch ||\n // The fetchMore method does not actually call the reobserve method, but,\n // if it did, it would definitely use a disposable Concast.\n newNetworkStatus === NetworkStatus.fetchMore ||\n // Polling uses a disposable Concast so the polling options (which force\n // fetchPolicy to be \"network-only\" or \"no-cache\") won't override the original options.\n newNetworkStatus === NetworkStatus.poll;\n\n // Save the old variables, since Object.assign may modify them below.\n const oldVariables = this.options.variables;\n const oldFetchPolicy = this.options.fetchPolicy;\n\n const mergedOptions = compact(this.options, newOptions || {});\n const options = useDisposableConcast\n // Disposable Concast fetches receive a shallow copy of this.options\n // (merged with newOptions), leaving this.options unmodified.\n ? mergedOptions\n : assign(this.options, mergedOptions);\n\n if (!useDisposableConcast) {\n // We can skip calling updatePolling if we're not changing this.options.\n this.updatePolling();\n\n // Reset options.fetchPolicy to its original value when variables change,\n // unless a new fetchPolicy was provided by newOptions.\n if (\n newOptions &&\n newOptions.variables &&\n !equal(newOptions.variables, oldVariables) &&\n // Don't mess with the fetchPolicy if it's currently \"standby\".\n options.fetchPolicy !== \"standby\" &&\n // If we're changing the fetchPolicy anyway, don't try to change it here\n // using applyNextFetchPolicy. 
The explicit options.fetchPolicy wins.\n options.fetchPolicy === oldFetchPolicy\n ) {\n this.applyNextFetchPolicy(\"variables-changed\", options);\n if (newNetworkStatus === void 0) {\n newNetworkStatus = NetworkStatus.setVariables;\n }\n }\n }\n\n const variables = options.variables && { ...options.variables };\n const concast = this.fetch(options, newNetworkStatus);\n const observer: Observer> = {\n next: result => {\n this.reportResult(result, variables);\n },\n error: error => {\n this.reportError(error, variables);\n },\n };\n\n if (!useDisposableConcast) {\n // We use the {add,remove}Observer methods directly to avoid wrapping\n // observer with an unnecessary SubscriptionObserver object.\n if (this.concast && this.observer) {\n this.concast.removeObserver(this.observer);\n }\n\n this.concast = concast;\n this.observer = observer;\n }\n\n concast.addObserver(observer);\n\n return concast.promise;\n }\n\n // (Re)deliver the current result to this.observers without applying fetch\n // policies or making network requests.\n private observe() {\n this.reportResult(\n // Passing false is important so that this.getCurrentResult doesn't\n // save the fetchMore result as this.lastResult, causing it to be\n // ignored due to the this.isDifferentFromLastResult check in\n // this.reportResult.\n this.getCurrentResult(false),\n this.variables,\n );\n }\n\n private reportResult(\n result: ApolloQueryResult,\n variables: TVariables | undefined,\n ) {\n const lastError = this.getLastError();\n if (lastError || this.isDifferentFromLastResult(result, variables)) {\n if (lastError || !result.partial || this.options.returnPartialData) {\n this.updateLastResult(result, variables);\n }\n\n iterateObserversSafely(this.observers, 'next', result);\n }\n }\n\n private reportError(\n error: ApolloError,\n variables: TVariables | undefined,\n ) {\n // Since we don't get the current result on errors, only the error, we\n // must mirror the updates that occur in QueryStore.markQueryError here\n const errorResult = {\n ...this.getLastResult(),\n error,\n errors: error.graphQLErrors,\n networkStatus: NetworkStatus.error,\n loading: false,\n } as ApolloQueryResult;\n\n this.updateLastResult(errorResult, variables);\n\n iterateObserversSafely(this.observers, 'error', this.last!.error = error);\n }\n\n public hasObservers() {\n return this.observers.size > 0;\n }\n\n private tearDownQuery() {\n if (this.isTornDown) return;\n if (this.concast && this.observer) {\n this.concast.removeObserver(this.observer);\n delete this.concast;\n delete this.observer;\n }\n\n this.stopPolling();\n // stop all active GraphQL subscriptions\n this.subscriptions.forEach(sub => sub.unsubscribe());\n this.subscriptions.clear();\n this.queryManager.stopQuery(this.queryId);\n this.observers.clear();\n this.isTornDown = true;\n }\n}\n\n// Necessary because the ObservableQuery constructor has a different\n// signature than the Observable constructor.\nfixObservableSubclass(ObservableQuery);\n\n// Reobserve with fetchPolicy effectively set to \"cache-first\", triggering\n// delivery of any new data from the cache, possibly falling back to the network\n// if any cache data are missing. This allows _complete_ cache results to be\n// delivered without also kicking off unnecessary network requests when\n// this.options.fetchPolicy is \"cache-and-network\" or \"network-only\". 
When\n// this.options.fetchPolicy is any other policy (\"cache-first\", \"cache-only\",\n// \"standby\", or \"no-cache\"), we call this.reobserve() as usual.\nexport function reobserveCacheFirst(\n obsQuery: ObservableQuery,\n) {\n const { fetchPolicy, nextFetchPolicy } = obsQuery.options;\n\n if (\n fetchPolicy === \"cache-and-network\" ||\n fetchPolicy === \"network-only\"\n ) {\n return obsQuery.reobserve({\n fetchPolicy: \"cache-first\",\n // Use a temporary nextFetchPolicy function that replaces itself with the\n // previous nextFetchPolicy value and returns the original fetchPolicy.\n nextFetchPolicy(this: WatchQueryOptions) {\n // Replace this nextFetchPolicy function in the options object with the\n // original this.options.nextFetchPolicy value.\n this.nextFetchPolicy = nextFetchPolicy;\n // If the original nextFetchPolicy value was a function, give it a\n // chance to decide what happens here.\n if (typeof nextFetchPolicy === \"function\") {\n return nextFetchPolicy.apply(this, arguments);\n }\n // Otherwise go back to the original this.options.fetchPolicy.\n return fetchPolicy!;\n },\n });\n }\n\n return obsQuery.reobserve();\n}\n\nfunction defaultSubscriptionObserverErrorCallback(error: ApolloError) {\n invariant.error('Unhandled error', error.message, error.stack);\n}\n\nexport function logMissingFieldErrors(\n missing: MissingFieldError[] | MissingTree | undefined,\n) {\n if (__DEV__ && missing) {\n invariant.debug(`Missing cache result fields: ${\n JSON.stringify(missing)\n }`, missing);\n }\n}\n","import { invariant } from '../utilities/globals';\n\nimport {\n DocumentNode,\n OperationDefinitionNode,\n SelectionSetNode,\n SelectionNode,\n InlineFragmentNode,\n FragmentDefinitionNode,\n FieldNode,\n ASTNode,\n visit,\n BREAK,\n} from 'graphql';\n\nimport { ApolloCache } from '../cache';\nimport {\n FragmentMap,\n StoreObject,\n argumentsObjectFromField,\n buildQueryFromSelectionSet,\n createFragmentMap,\n getFragmentDefinitions,\n getMainDefinition,\n hasDirectives,\n isField,\n isInlineFragment,\n mergeDeep,\n mergeDeepArray,\n removeClientSetsFromDocument,\n resultKeyNameFromField,\n shouldInclude,\n} from '../utilities';\nimport { ApolloClient } from './ApolloClient';\nimport { Resolvers, OperationVariables } from './types';\nimport { FetchResult } from '../link/core';\nimport { cacheSlot } from '../cache';\n\nexport type Resolver = (\n rootValue?: any,\n args?: any,\n context?: any,\n info?: {\n field: FieldNode;\n fragmentMap: FragmentMap;\n },\n) => any;\n\nexport type VariableMap = { [name: string]: any };\n\nexport type FragmentMatcher = (\n rootValue: any,\n typeCondition: string,\n context: any,\n) => boolean;\n\nexport type ExecContext = {\n fragmentMap: FragmentMap;\n context: any;\n variables: VariableMap;\n fragmentMatcher: FragmentMatcher;\n defaultOperationType: string;\n exportedVariables: Record;\n onlyRunForcedResolvers: boolean;\n};\n\nexport type LocalStateOptions = {\n cache: ApolloCache;\n client?: ApolloClient;\n resolvers?: Resolvers | Resolvers[];\n fragmentMatcher?: FragmentMatcher;\n};\n\nexport class LocalState {\n private cache: ApolloCache;\n private client: ApolloClient;\n private resolvers?: Resolvers;\n private fragmentMatcher: FragmentMatcher;\n\n constructor({\n cache,\n client,\n resolvers,\n fragmentMatcher,\n }: LocalStateOptions) {\n this.cache = cache;\n\n if (client) {\n this.client = client;\n }\n\n if (resolvers) {\n this.addResolvers(resolvers);\n }\n\n if (fragmentMatcher) {\n this.setFragmentMatcher(fragmentMatcher);\n }\n }\n\n 
public addResolvers(resolvers: Resolvers | Resolvers[]) {\n this.resolvers = this.resolvers || {};\n if (Array.isArray(resolvers)) {\n resolvers.forEach(resolverGroup => {\n this.resolvers = mergeDeep(this.resolvers, resolverGroup);\n });\n } else {\n this.resolvers = mergeDeep(this.resolvers, resolvers);\n }\n }\n\n public setResolvers(resolvers: Resolvers | Resolvers[]) {\n this.resolvers = {};\n this.addResolvers(resolvers);\n }\n\n public getResolvers() {\n return this.resolvers || {};\n }\n\n // Run local client resolvers against the incoming query and remote data.\n // Locally resolved field values are merged with the incoming remote data,\n // and returned. Note that locally resolved fields will overwrite\n // remote data using the same field name.\n public async runResolvers({\n document,\n remoteResult,\n context,\n variables,\n onlyRunForcedResolvers = false,\n }: {\n document: DocumentNode | null;\n remoteResult: FetchResult;\n context?: Record;\n variables?: Record;\n onlyRunForcedResolvers?: boolean;\n }): Promise> {\n if (document) {\n return this.resolveDocument(\n document,\n remoteResult.data,\n context,\n variables,\n this.fragmentMatcher,\n onlyRunForcedResolvers,\n ).then(localResult => ({\n ...remoteResult,\n data: localResult.result,\n }));\n }\n\n return remoteResult;\n }\n\n public setFragmentMatcher(fragmentMatcher: FragmentMatcher) {\n this.fragmentMatcher = fragmentMatcher;\n }\n\n public getFragmentMatcher(): FragmentMatcher {\n return this.fragmentMatcher;\n }\n\n // Client queries contain everything in the incoming document (if a @client\n // directive is found).\n public clientQuery(document: DocumentNode) {\n if (hasDirectives(['client'], document)) {\n if (this.resolvers) {\n return document;\n }\n }\n return null;\n }\n\n // Server queries are stripped of all @client based selection sets.\n public serverQuery(document: DocumentNode) {\n return removeClientSetsFromDocument(document);\n }\n\n public prepareContext(context?: Record) {\n const { cache } = this;\n return {\n ...context,\n cache,\n // Getting an entry's cache key is useful for local state resolvers.\n getCacheKey(obj: StoreObject) {\n return cache.identify(obj);\n },\n };\n }\n\n // To support `@client @export(as: \"someVar\")` syntax, we'll first resolve\n // @client @export fields locally, then pass the resolved values back to be\n // used alongside the original operation variables.\n public async addExportedVariables(\n document: DocumentNode,\n variables: OperationVariables = {},\n context = {},\n ) {\n if (document) {\n return this.resolveDocument(\n document,\n this.buildRootValueFromCache(document, variables) || {},\n this.prepareContext(context),\n variables,\n ).then(data => ({\n ...variables,\n ...data.exportedVariables,\n }));\n }\n\n return {\n ...variables,\n };\n }\n\n public shouldForceResolvers(document: ASTNode) {\n let forceResolvers = false;\n visit(document, {\n Directive: {\n enter(node) {\n if (node.name.value === 'client' && node.arguments) {\n forceResolvers = node.arguments.some(\n arg =>\n arg.name.value === 'always' &&\n arg.value.kind === 'BooleanValue' &&\n arg.value.value === true,\n );\n if (forceResolvers) {\n return BREAK;\n }\n }\n },\n },\n });\n return forceResolvers;\n }\n\n // Query the cache and return matching data.\n private buildRootValueFromCache(\n document: DocumentNode,\n variables?: Record,\n ) {\n return this.cache.diff({\n query: buildQueryFromSelectionSet(document),\n variables,\n returnPartialData: true,\n optimistic: false,\n }).result;\n }\n\n 
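  // Illustrative sketch (not part of the original source): addExportedVariables above
  // implements the `@client @export(as: "...")` pattern, where a locally resolved
  // field's value is injected into the operation's variables before the server
  // request is made. A minimal sketch, assuming a client-only `currentAuthorId`
  // field resolved from the cache or a local resolver:
  //
  //   const CURRENT_AUTHOR_POST_COUNT = gql`
  //     query CurrentAuthorPostCount($authorId: Int!) {
  //       currentAuthorId @client @export(as: "authorId")
  //       postCount(authorId: $authorId)
  //     }
  //   `;
  //
  //   // currentAuthorId is resolved locally first; its value is then merged into
  //   // the operation variables, so the server receives $authorId without the
  //   // caller passing it explicitly.
  //   client.query({ query: CURRENT_AUTHOR_POST_COUNT });
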
private async resolveDocument(\n document: DocumentNode,\n rootValue: TData,\n context: any = {},\n variables: VariableMap = {},\n fragmentMatcher: FragmentMatcher = () => true,\n onlyRunForcedResolvers: boolean = false,\n ) {\n const mainDefinition = getMainDefinition(document);\n const fragments = getFragmentDefinitions(document);\n const fragmentMap = createFragmentMap(fragments);\n\n const definitionOperation = (mainDefinition as OperationDefinitionNode)\n .operation;\n\n const defaultOperationType = definitionOperation\n ? definitionOperation.charAt(0).toUpperCase() +\n definitionOperation.slice(1)\n : 'Query';\n\n const { cache, client } = this;\n const execContext: ExecContext = {\n fragmentMap,\n context: {\n ...context,\n cache,\n client,\n },\n variables,\n fragmentMatcher,\n defaultOperationType,\n exportedVariables: {},\n onlyRunForcedResolvers,\n };\n\n return this.resolveSelectionSet(\n mainDefinition.selectionSet,\n rootValue,\n execContext,\n ).then(result => ({\n result,\n exportedVariables: execContext.exportedVariables,\n }));\n }\n\n private async resolveSelectionSet(\n selectionSet: SelectionSetNode,\n rootValue: TData,\n execContext: ExecContext,\n ) {\n const { fragmentMap, context, variables } = execContext;\n const resultsToMerge: TData[] = [rootValue];\n\n const execute = async (selection: SelectionNode): Promise => {\n if (!shouldInclude(selection, variables)) {\n // Skip this entirely.\n return;\n }\n\n if (isField(selection)) {\n return this.resolveField(selection, rootValue, execContext).then(\n fieldResult => {\n if (typeof fieldResult !== 'undefined') {\n resultsToMerge.push({\n [resultKeyNameFromField(selection)]: fieldResult,\n } as TData);\n }\n },\n );\n }\n\n let fragment: InlineFragmentNode | FragmentDefinitionNode;\n\n if (isInlineFragment(selection)) {\n fragment = selection;\n } else {\n // This is a named fragment.\n fragment = fragmentMap[selection.name.value];\n invariant(fragment, `No fragment named ${selection.name.value}`);\n }\n\n if (fragment && fragment.typeCondition) {\n const typeCondition = fragment.typeCondition.name.value;\n if (execContext.fragmentMatcher(rootValue, typeCondition, context)) {\n return this.resolveSelectionSet(\n fragment.selectionSet,\n rootValue,\n execContext,\n ).then(fragmentResult => {\n resultsToMerge.push(fragmentResult);\n });\n }\n }\n };\n\n return Promise.all(selectionSet.selections.map(execute)).then(function() {\n return mergeDeepArray(resultsToMerge);\n });\n }\n\n private async resolveField(\n field: FieldNode,\n rootValue: any,\n execContext: ExecContext,\n ): Promise {\n const { variables } = execContext;\n const fieldName = field.name.value;\n const aliasedFieldName = resultKeyNameFromField(field);\n const aliasUsed = fieldName !== aliasedFieldName;\n const defaultResult = rootValue[aliasedFieldName] || rootValue[fieldName];\n let resultPromise = Promise.resolve(defaultResult);\n\n // Usually all local resolvers are run when passing through here, but\n // if we've specifically identified that we only want to run forced\n // resolvers (that is, resolvers for fields marked with\n // `@client(always: true)`), then we'll skip running non-forced resolvers.\n if (\n !execContext.onlyRunForcedResolvers ||\n this.shouldForceResolvers(field)\n ) {\n const resolverType =\n rootValue.__typename || execContext.defaultOperationType;\n const resolverMap = this.resolvers && this.resolvers[resolverType];\n if (resolverMap) {\n const resolve = resolverMap[aliasUsed ? 
fieldName : aliasedFieldName];\n if (resolve) {\n resultPromise = Promise.resolve(\n // In case the resolve function accesses reactive variables,\n // set cacheSlot to the current cache instance.\n cacheSlot.withValue(this.cache, resolve, [\n rootValue,\n argumentsObjectFromField(field, variables),\n execContext.context,\n { field, fragmentMap: execContext.fragmentMap },\n ])\n );\n }\n }\n }\n\n return resultPromise.then((result = defaultResult) => {\n // If an @export directive is associated with the current field, store\n // the `as` export variable name and current result for later use.\n if (field.directives) {\n field.directives.forEach(directive => {\n if (directive.name.value === 'export' && directive.arguments) {\n directive.arguments.forEach(arg => {\n if (arg.name.value === 'as' && arg.value.kind === 'StringValue') {\n execContext.exportedVariables[arg.value.value] = result;\n }\n });\n }\n });\n }\n\n // Handle all scalar types here.\n if (!field.selectionSet) {\n return result;\n }\n\n // From here down, the field has a selection set, which means it's trying\n // to query a GraphQLObjectType.\n if (result == null) {\n // Basically any field in a GraphQL response can be null, or missing\n return result;\n }\n\n if (Array.isArray(result)) {\n return this.resolveSubSelectedArray(field, result, execContext);\n }\n\n // Returned value is an object, and the query has a sub-selection. Recurse.\n if (field.selectionSet) {\n return this.resolveSelectionSet(\n field.selectionSet,\n result,\n execContext,\n );\n }\n });\n }\n\n private resolveSubSelectedArray(\n field: FieldNode,\n result: any[],\n execContext: ExecContext,\n ): any {\n return Promise.all(\n result.map(item => {\n if (item === null) {\n return null;\n }\n\n // This is a nested array, recurse.\n if (Array.isArray(item)) {\n return this.resolveSubSelectedArray(field, item, execContext);\n }\n\n // This is an object, run the selection set on it.\n if (field.selectionSet) {\n return this.resolveSelectionSet(field.selectionSet, item, execContext);\n }\n }),\n );\n }\n}\n","import { DocumentNode, GraphQLError } from 'graphql';\nimport { equal } from \"@wry/equality\";\n\nimport { Cache, ApolloCache } from '../cache';\nimport { DeepMerger } from \"../utilities\"\nimport { WatchQueryOptions, ErrorPolicy } from './watchQueryOptions';\nimport { ObservableQuery, reobserveCacheFirst } from './ObservableQuery';\nimport { QueryListener } from './types';\nimport { FetchResult } from '../link/core';\nimport {\n ObservableSubscription,\n isNonEmptyArray,\n graphQLResultHasError,\n canUseWeakMap,\n} from '../utilities';\nimport {\n NetworkStatus,\n isNetworkRequestInFlight,\n} from './networkStatus';\nimport { ApolloError } from '../errors';\nimport { QueryManager } from './QueryManager';\n\nexport type QueryStoreValue = Pick;\n\nexport const enum CacheWriteBehavior {\n FORBID,\n OVERWRITE,\n MERGE,\n};\n\nconst destructiveMethodCounts = new (\n canUseWeakMap ? WeakMap : Map\n), number>();\n\nfunction wrapDestructiveCacheMethod(\n cache: ApolloCache,\n methodName: keyof ApolloCache,\n) {\n const original = cache[methodName];\n if (typeof original === \"function\") {\n cache[methodName] = function () {\n destructiveMethodCounts.set(\n cache,\n // The %1e15 allows the count to wrap around to 0 safely every\n // quadrillion evictions, so there's no risk of overflow. To be\n // clear, this is more of a pedantic principle than something\n // that matters in any conceivable practical scenario.\n (destructiveMethodCounts.get(cache)! 
+ 1) % 1e15,\n );\n return original.apply(this, arguments);\n };\n }\n}\n\nfunction cancelNotifyTimeout(info: QueryInfo) {\n if (info[\"notifyTimeout\"]) {\n clearTimeout(info[\"notifyTimeout\"]);\n info[\"notifyTimeout\"] = void 0;\n }\n}\n\n// A QueryInfo object represents a single query managed by the\n// QueryManager, which tracks all QueryInfo objects by queryId in its\n// this.queries Map. QueryInfo objects store the latest results and errors\n// for the given query, and are responsible for reporting those results to\n// the corresponding ObservableQuery, via the QueryInfo.notify method.\n// Results are reported asynchronously whenever setDiff marks the\n// QueryInfo object as dirty, though a call to the QueryManager's\n// broadcastQueries method may trigger the notification before it happens\n// automatically. This class used to be a simple interface type without\n// any field privacy or meaningful methods, which is why it still has so\n// many public fields. The effort to lock down and simplify the QueryInfo\n// interface is ongoing, and further improvements are welcome.\nexport class QueryInfo {\n listeners = new Set();\n document: DocumentNode | null = null;\n lastRequestId = 1;\n subscriptions = new Set();\n variables?: Record;\n networkStatus?: NetworkStatus;\n networkError?: Error | null;\n graphQLErrors?: ReadonlyArray;\n stopped = false;\n\n private cache: ApolloCache;\n\n constructor(\n queryManager: QueryManager,\n public readonly queryId = queryManager.generateQueryId(),\n ) {\n const cache = this.cache = queryManager.cache;\n\n // Track how often cache.evict is called, since we want eviction to\n // override the feud-stopping logic in the markResult method, by\n // causing shouldWrite to return true. Wrapping the cache.evict method\n // is a bit of a hack, but it saves us from having to make eviction\n // counting an official part of the ApolloCache API.\n if (!destructiveMethodCounts.has(cache)) {\n destructiveMethodCounts.set(cache, 0);\n wrapDestructiveCacheMethod(cache, \"evict\");\n wrapDestructiveCacheMethod(cache, \"modify\");\n wrapDestructiveCacheMethod(cache, \"reset\");\n }\n }\n\n public init(query: {\n document: DocumentNode;\n variables: Record | undefined,\n // The initial networkStatus for this fetch, most often\n // NetworkStatus.loading, but also possibly fetchMore, poll, refetch,\n // or setVariables.\n networkStatus?: NetworkStatus,\n observableQuery?: ObservableQuery;\n lastRequestId?: number;\n }): this {\n let networkStatus = query.networkStatus || NetworkStatus.loading;\n if (this.variables &&\n this.networkStatus !== NetworkStatus.loading &&\n !equal(this.variables, query.variables)) {\n networkStatus = NetworkStatus.setVariables;\n }\n\n if (!equal(query.variables, this.variables)) {\n this.lastDiff = void 0;\n }\n\n Object.assign(this, {\n document: query.document,\n variables: query.variables,\n networkError: null,\n graphQLErrors: this.graphQLErrors || [],\n networkStatus,\n });\n\n if (query.observableQuery) {\n this.setObservableQuery(query.observableQuery);\n }\n\n if (query.lastRequestId) {\n this.lastRequestId = query.lastRequestId;\n }\n\n return this;\n }\n\n private dirty: boolean = false;\n\n private notifyTimeout?: ReturnType;\n\n reset() {\n cancelNotifyTimeout(this);\n this.dirty = false;\n }\n\n getDiff(variables = this.variables): Cache.DiffResult {\n const options = this.getDiffOptions(variables);\n\n if (this.lastDiff && equal(options, this.lastDiff.options)) {\n return this.lastDiff.diff;\n }\n\n 
this.updateWatch(this.variables = variables);\n\n const oq = this.observableQuery;\n if (oq && oq.options.fetchPolicy === \"no-cache\") {\n return { complete: false };\n }\n\n const diff = this.cache.diff(options);\n this.updateLastDiff(diff, options);\n return diff;\n }\n\n private lastDiff?: {\n diff: Cache.DiffResult,\n options: Cache.DiffOptions,\n };\n\n private updateLastDiff(\n diff: Cache.DiffResult | null,\n options?: Cache.DiffOptions,\n ) {\n this.lastDiff = diff ? {\n diff,\n options: options || this.getDiffOptions(),\n } : void 0;\n }\n\n private getDiffOptions(variables = this.variables): Cache.DiffOptions {\n return {\n query: this.document!,\n variables,\n returnPartialData: true,\n optimistic: true,\n canonizeResults: this.observableQuery?.options.canonizeResults,\n };\n }\n\n setDiff(diff: Cache.DiffResult | null) {\n const oldDiff = this.lastDiff && this.lastDiff.diff;\n this.updateLastDiff(diff);\n if (!this.dirty &&\n !equal(oldDiff && oldDiff.result,\n diff && diff.result)) {\n this.dirty = true;\n if (!this.notifyTimeout) {\n this.notifyTimeout = setTimeout(() => this.notify(), 0);\n }\n }\n }\n\n public readonly observableQuery: ObservableQuery | null = null;\n private oqListener?: QueryListener;\n\n setObservableQuery(oq: ObservableQuery | null) {\n if (oq === this.observableQuery) return;\n\n if (this.oqListener) {\n this.listeners.delete(this.oqListener);\n }\n\n (this as any).observableQuery = oq;\n\n if (oq) {\n oq[\"queryInfo\"] = this;\n this.listeners.add(this.oqListener = () => {\n const diff = this.getDiff();\n if (diff.fromOptimisticTransaction) {\n // If this diff came from an optimistic transaction, deliver the\n // current cache data to the ObservableQuery, but don't perform a\n // reobservation, since oq.reobserveCacheFirst might make a network\n // request, and we never want to trigger network requests in the\n // middle of optimistic updates.\n oq[\"observe\"]();\n } else {\n // Otherwise, make the ObservableQuery \"reobserve\" the latest data\n // using a temporary fetch policy of \"cache-first\", so complete cache\n // results have a chance to be delivered without triggering additional\n // network requests, even when options.fetchPolicy is \"network-only\"\n // or \"cache-and-network\". All other fetch policies are preserved by\n // this method, and are handled by calling oq.reobserve(). 
If this\n // reobservation is spurious, isDifferentFromLastResult still has a\n // chance to catch it before delivery to ObservableQuery subscribers.\n reobserveCacheFirst(oq);\n }\n });\n } else {\n delete this.oqListener;\n }\n }\n\n notify() {\n cancelNotifyTimeout(this);\n\n if (this.shouldNotify()) {\n this.listeners.forEach(listener => listener(this));\n }\n\n this.dirty = false;\n }\n\n private shouldNotify() {\n if (!this.dirty || !this.listeners.size) {\n return false;\n }\n\n if (isNetworkRequestInFlight(this.networkStatus) &&\n this.observableQuery) {\n const { fetchPolicy } = this.observableQuery.options;\n if (fetchPolicy !== \"cache-only\" &&\n fetchPolicy !== \"cache-and-network\") {\n return false;\n }\n }\n\n return true;\n }\n\n public stop() {\n if (!this.stopped) {\n this.stopped = true;\n\n // Cancel the pending notify timeout\n this.reset();\n\n this.cancel();\n // Revert back to the no-op version of cancel inherited from\n // QueryInfo.prototype.\n this.cancel = QueryInfo.prototype.cancel;\n\n this.subscriptions.forEach(sub => sub.unsubscribe());\n\n const oq = this.observableQuery;\n if (oq) oq.stopPolling();\n }\n }\n\n // This method is a no-op by default, until/unless overridden by the\n // updateWatch method.\n private cancel() {}\n\n private lastWatch?: Cache.WatchOptions;\n\n private updateWatch(variables = this.variables) {\n const oq = this.observableQuery;\n if (oq && oq.options.fetchPolicy === \"no-cache\") {\n return;\n }\n\n const watchOptions: Cache.WatchOptions = {\n // Although this.getDiffOptions returns Cache.DiffOptions instead of\n // Cache.WatchOptions, all the overlapping options should be the same, so\n // we can reuse getDiffOptions here, for consistency.\n ...this.getDiffOptions(variables),\n watcher: this,\n callback: diff => this.setDiff(diff),\n };\n\n if (!this.lastWatch ||\n !equal(watchOptions, this.lastWatch)) {\n this.cancel();\n this.cancel = this.cache.watch(this.lastWatch = watchOptions);\n }\n }\n\n private lastWrite?: {\n result: FetchResult;\n variables: WatchQueryOptions[\"variables\"];\n dmCount: number | undefined;\n };\n\n public resetLastWrite() {\n this.lastWrite = void 0;\n }\n\n private shouldWrite(\n result: FetchResult,\n variables: WatchQueryOptions[\"variables\"],\n ) {\n const { lastWrite } = this;\n return !(\n lastWrite &&\n // If cache.evict has been called since the last time we wrote this\n // data into the cache, there's a chance writing this result into\n // the cache will repair what was evicted.\n lastWrite.dmCount === destructiveMethodCounts.get(this.cache) &&\n equal(variables, lastWrite.variables) &&\n equal(result.data, lastWrite.result.data)\n );\n }\n\n public markResult(\n result: FetchResult,\n document: DocumentNode,\n options: Pick,\n cacheWriteBehavior: CacheWriteBehavior,\n ) {\n const graphQLErrors = isNonEmptyArray(result.errors)\n ? result.errors.slice(0)\n : [];\n\n // Cancel the pending notify timeout (if it exists) to prevent extraneous network\n // requests. To allow future notify timeouts, diff and dirty are reset as well.\n this.reset();\n\n if ('incremental' in result && isNonEmptyArray(result.incremental)) {\n let mergedData = this.getDiff().result;\n const merger = new DeepMerger();\n result.incremental.forEach(({ data, path, errors }) => {\n for (let i = path.length - 1; i >= 0; --i) {\n const key = path[i];\n const isNumericKey = !isNaN(+key);\n const parent: Record = isNumericKey ? 
[] : {};\n parent[key] = data;\n data = parent as typeof data;\n }\n if (errors) {\n graphQLErrors.push(...errors);\n }\n mergedData = merger.merge(mergedData, data);\n });\n result.data = mergedData;\n }\n\n this.graphQLErrors = graphQLErrors;\n\n if (options.fetchPolicy === 'no-cache') {\n this.updateLastDiff(\n { result: result.data, complete: true },\n this.getDiffOptions(options.variables),\n );\n\n } else if (cacheWriteBehavior !== CacheWriteBehavior.FORBID) {\n if (shouldWriteResult(result, options.errorPolicy)) {\n // Using a transaction here so we have a chance to read the result\n // back from the cache before the watch callback fires as a result\n // of writeQuery, so we can store the new diff quietly and ignore\n // it when we receive it redundantly from the watch callback.\n this.cache.performTransaction(cache => {\n if (this.shouldWrite(result, options.variables)) {\n cache.writeQuery({\n query: document,\n data: result.data as T,\n variables: options.variables,\n overwrite: cacheWriteBehavior === CacheWriteBehavior.OVERWRITE,\n });\n\n this.lastWrite = {\n result,\n variables: options.variables,\n dmCount: destructiveMethodCounts.get(this.cache),\n };\n } else {\n // If result is the same as the last result we received from\n // the network (and the variables match too), avoid writing\n // result into the cache again. The wisdom of skipping this\n // cache write is far from obvious, since any cache write\n // could be the one that puts the cache back into a desired\n // state, fixing corruption or missing data. However, if we\n // always write every network result into the cache, we enable\n // feuds between queries competing to update the same data in\n // incompatible ways, which can lead to an endless cycle of\n // cache broadcasts and useless network requests. As with any\n // feud, eventually one side must step back from the brink,\n // letting the other side(s) have the last word(s). There may\n // be other points where we could break this cycle, such as\n // silencing the broadcast for cache.writeQuery (not a good\n // idea, since it just delays the feud a bit) or somehow\n // avoiding the network request that just happened (also bad,\n // because the server could return useful new data). All\n // options considered, skipping this cache write seems to be\n // the least damaging place to break the cycle, because it\n // reflects the intuition that we recently wrote this exact\n // result into the cache, so the cache *should* already/still\n // contain this data. If some other query has clobbered that\n // data in the meantime, that's too bad, but there will be no\n // winners if every query blindly reverts to its own version\n // of the data. This approach also gives the network a chance\n // to return new data, which will be written into the cache as\n // usual, notifying only those queries that are directly\n // affected by the cache updates, as usual. 
In the future, an\n // even more sophisticated cache could perhaps prevent or\n // mitigate the clobbering somehow, but that would make this\n // particular cache write even less important, and thus\n // skipping it would be even safer than it is today.\n if (this.lastDiff &&\n this.lastDiff.diff.complete) {\n // Reuse data from the last good (complete) diff that we\n // received, when possible.\n result.data = this.lastDiff.diff.result;\n return;\n }\n // If the previous this.diff was incomplete, fall through to\n // re-reading the latest data with cache.diff, below.\n }\n\n const diffOptions = this.getDiffOptions(options.variables);\n const diff = cache.diff(diffOptions);\n\n // In case the QueryManager stops this QueryInfo before its\n // results are delivered, it's important to avoid restarting the\n // cache watch when markResult is called.\n if (!this.stopped) {\n // Any time we're about to update this.diff, we need to make\n // sure we've started watching the cache.\n this.updateWatch(options.variables);\n }\n\n // If we're allowed to write to the cache, and we can read a\n // complete result from the cache, update result.data to be the\n // result from the cache, rather than the raw network result.\n // Set without setDiff to avoid triggering a notify call, since\n // we have other ways of notifying for this result.\n this.updateLastDiff(diff, diffOptions);\n if (diff.complete) {\n result.data = diff.result;\n }\n });\n } else {\n this.lastWrite = void 0;\n }\n }\n }\n\n public markReady() {\n this.networkError = null;\n return this.networkStatus = NetworkStatus.ready;\n }\n\n public markError(error: ApolloError) {\n this.networkStatus = NetworkStatus.error;\n this.lastWrite = void 0;\n\n this.reset();\n\n if (error.graphQLErrors) {\n this.graphQLErrors = error.graphQLErrors;\n }\n\n if (error.networkError) {\n this.networkError = error.networkError;\n }\n\n return error;\n }\n}\n\nexport function shouldWriteResult(\n result: FetchResult,\n errorPolicy: ErrorPolicy = \"none\",\n) {\n const ignoreErrors =\n errorPolicy === \"ignore\" ||\n errorPolicy === \"all\";\n let writeWithErrors = !graphQLResultHasError(result);\n if (!writeWithErrors && ignoreErrors && result.data) {\n writeWithErrors = true;\n }\n return writeWithErrors;\n}\n","import { invariant, InvariantError } from '../utilities/globals';\n\nimport { DocumentNode } from 'graphql';\n// TODO(brian): A hack until this issue is resolved (https://github.com/graphql/graphql-js/issues/3356)\ntype OperationTypeNode = any;\nimport { equal } from '@wry/equality';\n\nimport { ApolloLink, execute, FetchResult } from '../link/core';\nimport { isExecutionPatchIncrementalResult } from '../utilities/common/incrementalResult';\nimport { Cache, ApolloCache, canonicalStringify } from '../cache';\n\nimport {\n getDefaultValues,\n getOperationDefinition,\n getOperationName,\n hasClientExports,\n graphQLResultHasError,\n removeConnectionDirectiveFromDocument,\n canUseWeakMap,\n ObservableSubscription,\n Observable,\n asyncMap,\n isNonEmptyArray,\n Concast,\n ConcastSourcesArray,\n makeUniqueId,\n isDocumentNode,\n isNonNullObject,\n cloneDeep,\n} from '../utilities';\nimport { ApolloError, isApolloError } from '../errors';\nimport {\n QueryOptions,\n WatchQueryOptions,\n SubscriptionOptions,\n MutationOptions,\n ErrorPolicy,\n MutationFetchPolicy,\n} from './watchQueryOptions';\nimport { ObservableQuery, logMissingFieldErrors } from './ObservableQuery';\nimport { NetworkStatus, isNetworkRequestInFlight } from './networkStatus';\nimport {\n 
ApolloQueryResult,\n OperationVariables,\n MutationUpdaterFunction,\n OnQueryUpdated,\n InternalRefetchQueriesInclude,\n InternalRefetchQueriesOptions,\n InternalRefetchQueriesResult,\n InternalRefetchQueriesMap,\n} from './types';\nimport { LocalState } from './LocalState';\n\nimport {\n QueryInfo,\n QueryStoreValue,\n shouldWriteResult,\n CacheWriteBehavior,\n} from './QueryInfo';\n\nconst { hasOwnProperty } = Object.prototype;\n\ninterface MutationStoreValue {\n mutation: DocumentNode;\n variables: Record;\n loading: boolean;\n error: Error | null;\n}\n\ntype UpdateQueries = MutationOptions[\"updateQueries\"];\n\ninterface TransformCacheEntry {\n document: DocumentNode;\n hasClientExports: boolean;\n hasForcedResolvers: boolean;\n clientQuery: DocumentNode | null;\n serverQuery: DocumentNode | null;\n defaultVars: OperationVariables;\n asQuery: DocumentNode;\n}\n\ntype DefaultOptions = import(\"./ApolloClient\").DefaultOptions;\n\nexport class QueryManager {\n public cache: ApolloCache;\n public link: ApolloLink;\n public defaultOptions: DefaultOptions;\n\n public readonly assumeImmutableResults: boolean;\n public readonly ssrMode: boolean;\n\n private queryDeduplication: boolean;\n private clientAwareness: Record = {};\n private localState: LocalState;\n\n private onBroadcast?: () => void;\n public mutationStore?: {\n [mutationId: string]: MutationStoreValue;\n };\n\n // All the queries that the QueryManager is currently managing (not\n // including mutations and subscriptions).\n private queries = new Map();\n\n // Maps from queryId strings to Promise rejection functions for\n // currently active queries and fetches.\n private fetchCancelFns = new Map any>();\n\n constructor({\n cache,\n link,\n defaultOptions,\n queryDeduplication = false,\n onBroadcast,\n ssrMode = false,\n clientAwareness = {},\n localState,\n assumeImmutableResults,\n }: {\n cache: ApolloCache;\n link: ApolloLink;\n defaultOptions?: DefaultOptions;\n queryDeduplication?: boolean;\n onBroadcast?: () => void;\n ssrMode?: boolean;\n clientAwareness?: Record;\n localState?: LocalState;\n assumeImmutableResults?: boolean;\n }) {\n this.cache = cache;\n this.link = link;\n this.defaultOptions = defaultOptions || Object.create(null);\n this.queryDeduplication = queryDeduplication;\n this.clientAwareness = clientAwareness;\n this.localState = localState || new LocalState({ cache });\n this.ssrMode = ssrMode;\n this.assumeImmutableResults = !!assumeImmutableResults;\n if ((this.onBroadcast = onBroadcast)) {\n this.mutationStore = Object.create(null);\n }\n }\n\n /**\n * Call this method to terminate any active query processes, making it safe\n * to dispose of this QueryManager instance.\n */\n public stop() {\n this.queries.forEach((_info, queryId) => {\n this.stopQueryNoBroadcast(queryId);\n });\n\n this.cancelPendingFetches(\n new InvariantError('QueryManager stopped while query was in flight'),\n );\n }\n\n private cancelPendingFetches(error: Error) {\n this.fetchCancelFns.forEach(cancel => cancel(error));\n this.fetchCancelFns.clear();\n }\n\n public async mutate<\n TData,\n TVariables,\n TContext,\n TCache extends ApolloCache\n >({\n mutation,\n variables,\n optimisticResponse,\n updateQueries,\n refetchQueries = [],\n awaitRefetchQueries = false,\n update: updateWithProxyFn,\n onQueryUpdated,\n fetchPolicy = this.defaultOptions.mutate?.fetchPolicy || \"network-only\",\n errorPolicy = this.defaultOptions.mutate?.errorPolicy || \"none\",\n keepRootFields,\n context,\n }: MutationOptions): Promise> {\n invariant(\n 
mutation,\n 'mutation option is required. You must specify your GraphQL document in the mutation option.',\n );\n\n invariant(\n fetchPolicy === 'network-only' ||\n fetchPolicy === 'no-cache',\n \"Mutations support only 'network-only' or 'no-cache' fetchPolicy strings. The default `network-only` behavior automatically writes mutation results to the cache. Passing `no-cache` skips the cache write.\"\n );\n\n const mutationId = this.generateMutationId();\n\n const {\n document,\n hasClientExports,\n } = this.transform(mutation);\n mutation = this.cache.transformForLink(document);\n\n variables = this.getVariables(mutation, variables) as TVariables;\n if (hasClientExports) {\n variables = await this.localState.addExportedVariables(mutation, variables, context) as TVariables;\n }\n\n const mutationStoreValue =\n this.mutationStore &&\n (this.mutationStore[mutationId] = {\n mutation,\n variables,\n loading: true,\n error: null,\n } as MutationStoreValue);\n\n if (optimisticResponse) {\n this.markMutationOptimistic<\n TData,\n TVariables,\n TContext,\n TCache\n >(optimisticResponse, {\n mutationId,\n document: mutation,\n variables,\n fetchPolicy,\n errorPolicy,\n context,\n updateQueries,\n update: updateWithProxyFn,\n keepRootFields,\n });\n }\n\n this.broadcastQueries();\n\n const self = this;\n\n return new Promise((resolve, reject) => {\n return asyncMap(\n self.getObservableFromLink(\n mutation,\n {\n ...context,\n optimisticResponse,\n },\n variables,\n false,\n ),\n\n (result: FetchResult) => {\n if (graphQLResultHasError(result) && errorPolicy === 'none') {\n throw new ApolloError({\n graphQLErrors: result.errors,\n });\n }\n\n if (mutationStoreValue) {\n mutationStoreValue.loading = false;\n mutationStoreValue.error = null;\n }\n\n const storeResult: typeof result = { ...result };\n\n if (typeof refetchQueries === \"function\") {\n refetchQueries = refetchQueries(storeResult);\n }\n\n if (errorPolicy === 'ignore' &&\n graphQLResultHasError(storeResult)) {\n delete storeResult.errors;\n }\n\n return self.markMutationResult<\n TData,\n TVariables,\n TContext,\n TCache\n >({\n mutationId,\n result: storeResult,\n document: mutation,\n variables,\n fetchPolicy,\n errorPolicy,\n context,\n update: updateWithProxyFn,\n updateQueries,\n awaitRefetchQueries,\n refetchQueries,\n removeOptimistic: optimisticResponse ? mutationId : void 0,\n onQueryUpdated,\n keepRootFields,\n });\n },\n\n ).subscribe({\n next(storeResult) {\n self.broadcastQueries();\n\n // At the moment, a mutation can have only one result, so we can\n // immediately resolve upon receiving the first result. In the future,\n // mutations containing @defer or @stream directives might receive\n // multiple FetchResult payloads from the ApolloLink chain, so we will\n // probably need to collect those results in this next method and call\n // resolve only later, in an observer.complete function.\n resolve(storeResult);\n },\n\n error(err: Error) {\n if (mutationStoreValue) {\n mutationStoreValue.loading = false;\n mutationStoreValue.error = err;\n }\n\n if (optimisticResponse) {\n self.cache.removeOptimistic(mutationId);\n }\n\n self.broadcastQueries();\n\n reject(\n err instanceof ApolloError ? 
err : new ApolloError({\n networkError: err,\n }),\n );\n },\n });\n });\n }\n\n public markMutationResult<\n TData,\n TVariables,\n TContext,\n TCache extends ApolloCache\n >(\n mutation: {\n mutationId: string;\n result: FetchResult;\n document: DocumentNode;\n variables?: TVariables;\n fetchPolicy?: MutationFetchPolicy;\n errorPolicy: ErrorPolicy;\n context?: TContext;\n updateQueries: UpdateQueries;\n update?: MutationUpdaterFunction;\n awaitRefetchQueries?: boolean;\n refetchQueries?: InternalRefetchQueriesInclude;\n removeOptimistic?: string;\n onQueryUpdated?: OnQueryUpdated;\n keepRootFields?: boolean;\n },\n cache = this.cache,\n ): Promise> {\n let { result } = mutation;\n const cacheWrites: Cache.WriteOptions[] = [];\n const skipCache = mutation.fetchPolicy === \"no-cache\";\n\n if (!skipCache && shouldWriteResult(result, mutation.errorPolicy)) {\n cacheWrites.push({\n result: result.data,\n dataId: 'ROOT_MUTATION',\n query: mutation.document,\n variables: mutation.variables,\n });\n\n const { updateQueries } = mutation;\n if (updateQueries) {\n this.queries.forEach(({ observableQuery }, queryId) => {\n const queryName = observableQuery && observableQuery.queryName;\n if (!queryName || !hasOwnProperty.call(updateQueries, queryName)) {\n return;\n }\n const updater = updateQueries[queryName];\n const { document, variables } = this.queries.get(queryId)!;\n\n // Read the current query result from the store.\n const { result: currentQueryResult, complete } = cache.diff({\n query: document!,\n variables,\n returnPartialData: true,\n optimistic: false,\n });\n\n if (complete && currentQueryResult) {\n // Run our reducer using the current query result and the mutation result.\n const nextQueryResult = updater(currentQueryResult, {\n mutationResult: result,\n queryName: document && getOperationName(document) || void 0,\n queryVariables: variables!,\n });\n\n // Write the modified result back into the store if we got a new result.\n if (nextQueryResult) {\n cacheWrites.push({\n result: nextQueryResult,\n dataId: 'ROOT_QUERY',\n query: document!,\n variables,\n });\n }\n }\n });\n }\n }\n\n if (\n cacheWrites.length > 0 ||\n mutation.refetchQueries ||\n mutation.update ||\n mutation.onQueryUpdated ||\n mutation.removeOptimistic\n ) {\n const results: any[] = [];\n\n this.refetchQueries({\n updateCache: (cache: TCache) => {\n if (!skipCache) {\n cacheWrites.forEach(write => cache.write(write));\n }\n\n // If the mutation has some writes associated with it then we need to\n // apply those writes to the store by running this reducer again with\n // a write action.\n const { update } = mutation;\n if (update) {\n if (!skipCache) {\n // Re-read the ROOT_MUTATION data we just wrote into the cache\n // (the first cache.write call in the cacheWrites.forEach loop\n // above), so field read functions have a chance to run for\n // fields within mutation result objects.\n const diff = cache.diff({\n id: \"ROOT_MUTATION\",\n // The cache complains if passed a mutation where it expects a\n // query, so we transform mutations and subscriptions to queries\n // (only once, thanks to this.transformCache).\n query: this.transform(mutation.document).asQuery,\n variables: mutation.variables,\n optimistic: false,\n returnPartialData: true,\n });\n\n if (diff.complete && !(isExecutionPatchIncrementalResult(result))) {\n result = { ...result, data: diff.result };\n }\n }\n\n update(cache, result, {\n context: mutation.context,\n variables: mutation.variables,\n });\n }\n\n // TODO Do this with cache.evict({ id: 
'ROOT_MUTATION' }) but make it\n // shallow to allow rolling back optimistic evictions.\n if (!skipCache && !mutation.keepRootFields) {\n cache.modify({\n id: 'ROOT_MUTATION',\n fields(value, { fieldName, DELETE }) {\n return fieldName === \"__typename\" ? value : DELETE;\n },\n });\n }\n },\n\n include: mutation.refetchQueries,\n\n // Write the final mutation.result to the root layer of the cache.\n optimistic: false,\n\n // Remove the corresponding optimistic layer at the same time as we\n // write the final non-optimistic result.\n removeOptimistic: mutation.removeOptimistic,\n\n // Let the caller of client.mutate optionally determine the refetching\n // behavior for watched queries after the mutation.update function runs.\n // If no onQueryUpdated function was provided for this mutation, pass\n // null instead of undefined to disable the default refetching behavior.\n onQueryUpdated: mutation.onQueryUpdated || null,\n\n }).forEach(result => results.push(result));\n\n if (mutation.awaitRefetchQueries || mutation.onQueryUpdated) {\n // Returning a promise here makes the mutation await that promise, so we\n // include results in that promise's work if awaitRefetchQueries or an\n // onQueryUpdated function was specified.\n return Promise.all(results).then(() => result);\n }\n }\n\n return Promise.resolve(result);\n }\n\n public markMutationOptimistic>(\n optimisticResponse: any,\n mutation: {\n mutationId: string;\n document: DocumentNode;\n variables?: TVariables;\n fetchPolicy?: MutationFetchPolicy;\n errorPolicy: ErrorPolicy;\n context?: TContext;\n updateQueries: UpdateQueries,\n update?: MutationUpdaterFunction;\n keepRootFields?: boolean,\n },\n ) {\n const data = typeof optimisticResponse === \"function\"\n ? optimisticResponse(mutation.variables)\n : optimisticResponse;\n\n return this.cache.recordOptimisticTransaction(cache => {\n try {\n this.markMutationResult({\n ...mutation,\n result: { data },\n }, cache);\n } catch (error) {\n invariant.error(error);\n }\n }, mutation.mutationId);\n }\n\n public fetchQuery(\n queryId: string,\n options: WatchQueryOptions,\n networkStatus?: NetworkStatus,\n ): Promise> {\n return this.fetchQueryObservable(\n queryId,\n options,\n networkStatus,\n ).promise;\n }\n\n public getQueryStore() {\n const store: Record = Object.create(null);\n this.queries.forEach((info, queryId) => {\n store[queryId] = {\n variables: info.variables,\n networkStatus: info.networkStatus,\n networkError: info.networkError,\n graphQLErrors: info.graphQLErrors,\n };\n });\n return store;\n }\n\n public resetErrors(queryId: string) {\n const queryInfo = this.queries.get(queryId);\n if (queryInfo) {\n queryInfo.networkError = undefined;\n queryInfo.graphQLErrors = [];\n }\n }\n\n private transformCache = new (\n canUseWeakMap ? 
WeakMap : Map\n )();\n\n public transform(document: DocumentNode) {\n const { transformCache } = this;\n\n if (!transformCache.has(document)) {\n const transformed = this.cache.transformDocument(document);\n const noConnection = removeConnectionDirectiveFromDocument(transformed);\n const clientQuery = this.localState.clientQuery(transformed);\n const serverQuery = noConnection && this.localState.serverQuery(noConnection);\n\n const cacheEntry: TransformCacheEntry = {\n document: transformed,\n // TODO These two calls (hasClientExports and shouldForceResolvers)\n // could probably be merged into a single traversal.\n hasClientExports: hasClientExports(transformed),\n hasForcedResolvers: this.localState.shouldForceResolvers(transformed),\n clientQuery,\n serverQuery,\n defaultVars: getDefaultValues(\n getOperationDefinition(transformed)\n ) as OperationVariables,\n // Transform any mutation or subscription operations to query operations\n // so we can read/write them from/to the cache.\n asQuery: {\n ...transformed,\n definitions: transformed.definitions.map(def => {\n if (def.kind === \"OperationDefinition\" &&\n def.operation !== \"query\") {\n return { ...def, operation: \"query\" as OperationTypeNode };\n }\n return def;\n }),\n }\n };\n\n const add = (doc: DocumentNode | null) => {\n if (doc && !transformCache.has(doc)) {\n transformCache.set(doc, cacheEntry);\n }\n }\n // Add cacheEntry to the transformCache using several different keys,\n // since any one of these documents could end up getting passed to the\n // transform method again in the future.\n add(document);\n add(transformed);\n add(clientQuery);\n add(serverQuery);\n }\n\n return transformCache.get(document)!;\n }\n\n private getVariables(\n document: DocumentNode,\n variables?: TVariables,\n ): OperationVariables {\n return {\n ...this.transform(document).defaultVars,\n ...variables,\n };\n }\n\n public watchQuery(\n options: WatchQueryOptions,\n ): ObservableQuery {\n // assign variable default values if supplied\n options = {\n ...options,\n variables: this.getVariables(\n options.query,\n options.variables,\n ) as TVariables,\n };\n\n if (typeof options.notifyOnNetworkStatusChange === 'undefined') {\n options.notifyOnNetworkStatusChange = false;\n }\n\n const queryInfo = new QueryInfo(this);\n const observable = new ObservableQuery({\n queryManager: this,\n queryInfo,\n options,\n });\n\n this.queries.set(observable.queryId, queryInfo);\n\n queryInfo.init({\n document: observable.query,\n observableQuery: observable,\n variables: observable.variables,\n });\n\n return observable;\n }\n\n public query(\n options: QueryOptions,\n queryId = this.generateQueryId(),\n ): Promise> {\n invariant(\n options.query,\n 'query option is required. 
You must specify your GraphQL document ' +\n 'in the query option.',\n );\n\n invariant(\n options.query.kind === 'Document',\n 'You must wrap the query string in a \"gql\" tag.',\n );\n\n invariant(\n !(options as any).returnPartialData,\n 'returnPartialData option only supported on watchQuery.',\n );\n\n invariant(\n !(options as any).pollInterval,\n 'pollInterval option only supported on watchQuery.',\n );\n\n return this.fetchQuery(\n queryId,\n options,\n ).finally(() => this.stopQuery(queryId));\n }\n\n private queryIdCounter = 1;\n public generateQueryId() {\n return String(this.queryIdCounter++);\n }\n\n private requestIdCounter = 1;\n public generateRequestId() {\n return this.requestIdCounter++;\n }\n\n private mutationIdCounter = 1;\n public generateMutationId() {\n return String(this.mutationIdCounter++);\n }\n\n public stopQueryInStore(queryId: string) {\n this.stopQueryInStoreNoBroadcast(queryId);\n this.broadcastQueries();\n }\n\n private stopQueryInStoreNoBroadcast(queryId: string) {\n const queryInfo = this.queries.get(queryId);\n if (queryInfo) queryInfo.stop();\n }\n\n public clearStore(options: Cache.ResetOptions = {\n discardWatches: true,\n }): Promise {\n // Before we have sent the reset action to the store, we can no longer\n // rely on the results returned by in-flight requests since these may\n // depend on values that previously existed in the data portion of the\n // store. So, we cancel the promises and observers that we have issued\n // so far and not yet resolved (in the case of queries).\n this.cancelPendingFetches(new InvariantError(\n 'Store reset while query was in flight (not completed in link chain)',\n ));\n\n this.queries.forEach(queryInfo => {\n if (queryInfo.observableQuery) {\n // Set loading to true so listeners don't trigger unless they want\n // results with partial data.\n queryInfo.networkStatus = NetworkStatus.loading;\n } else {\n queryInfo.stop();\n }\n });\n\n if (this.mutationStore) {\n this.mutationStore = Object.create(null);\n }\n\n // begin removing data from the store\n return this.cache.reset(options);\n }\n\n public getObservableQueries(\n include: InternalRefetchQueriesInclude = \"active\",\n ) {\n const queries = new Map>();\n const queryNamesAndDocs = new Map();\n const legacyQueryOptions = new Set();\n\n if (Array.isArray(include)) {\n include.forEach(desc => {\n if (typeof desc === \"string\") {\n queryNamesAndDocs.set(desc, false);\n } else if (isDocumentNode(desc)) {\n queryNamesAndDocs.set(this.transform(desc).document, false);\n } else if (isNonNullObject(desc) && desc.query) {\n legacyQueryOptions.add(desc);\n }\n });\n }\n\n this.queries.forEach(({ observableQuery: oq, document }, queryId) => {\n if (oq) {\n if (include === \"all\") {\n queries.set(queryId, oq);\n return;\n }\n\n const {\n queryName,\n options: { fetchPolicy },\n } = oq;\n\n if (\n fetchPolicy === \"standby\" ||\n (include === \"active\" && !oq.hasObservers())\n ) {\n return;\n }\n\n if (\n include === \"active\" ||\n (queryName && queryNamesAndDocs.has(queryName)) ||\n (document && queryNamesAndDocs.has(document))\n ) {\n queries.set(queryId, oq);\n if (queryName) queryNamesAndDocs.set(queryName, true);\n if (document) queryNamesAndDocs.set(document, true);\n }\n }\n });\n\n if (legacyQueryOptions.size) {\n legacyQueryOptions.forEach((options: QueryOptions) => {\n // We will be issuing a fresh network request for this query, so we\n // pre-allocate a new query ID here, using a special prefix to enable\n // cleaning up these temporary queries later, 
after fetching.\n const queryId = makeUniqueId(\"legacyOneTimeQuery\");\n const queryInfo = this.getQuery(queryId).init({\n document: options.query,\n variables: options.variables,\n });\n const oq = new ObservableQuery({\n queryManager: this,\n queryInfo,\n options: {\n ...options,\n fetchPolicy: \"network-only\",\n },\n });\n invariant(oq.queryId === queryId);\n queryInfo.setObservableQuery(oq);\n queries.set(queryId, oq);\n });\n }\n\n if (__DEV__ && queryNamesAndDocs.size) {\n queryNamesAndDocs.forEach((included, nameOrDoc) => {\n if (!included) {\n invariant.warn(`Unknown query ${\n typeof nameOrDoc === \"string\" ? \"named \" : \"\"\n }${\n JSON.stringify(nameOrDoc, null, 2)\n } requested in refetchQueries options.include array`);\n }\n });\n }\n\n return queries;\n }\n\n public reFetchObservableQueries(\n includeStandby: boolean = false,\n ): Promise[]> {\n const observableQueryPromises: Promise>[] = [];\n\n this.getObservableQueries(\n includeStandby ? \"all\" : \"active\"\n ).forEach((observableQuery, queryId) => {\n const { fetchPolicy } = observableQuery.options;\n observableQuery.resetLastResults();\n if (includeStandby ||\n (fetchPolicy !== \"standby\" &&\n fetchPolicy !== \"cache-only\")) {\n observableQueryPromises.push(observableQuery.refetch());\n }\n this.getQuery(queryId).setDiff(null);\n });\n\n this.broadcastQueries();\n\n return Promise.all(observableQueryPromises);\n }\n\n public setObservableQuery(observableQuery: ObservableQuery) {\n this.getQuery(observableQuery.queryId).setObservableQuery(observableQuery);\n }\n\n public startGraphQLSubscription({\n query,\n fetchPolicy,\n errorPolicy,\n variables,\n context = {},\n }: SubscriptionOptions): Observable> {\n query = this.transform(query).document;\n variables = this.getVariables(query, variables);\n\n const makeObservable = (variables: OperationVariables) =>\n this.getObservableFromLink(\n query,\n context,\n variables,\n ).map(result => {\n if (fetchPolicy !== 'no-cache') {\n // the subscription interface should handle not sending us results we no longer subscribe to.\n // XXX I don't think we ever send in an object with errors, but we might in the future...\n if (shouldWriteResult(result, errorPolicy)) {\n this.cache.write({\n query,\n result: result.data,\n dataId: 'ROOT_SUBSCRIPTION',\n variables: variables,\n });\n }\n\n this.broadcastQueries();\n }\n\n if (graphQLResultHasError(result)) {\n throw new ApolloError({\n graphQLErrors: result.errors,\n });\n }\n\n return result;\n });\n\n if (this.transform(query).hasClientExports) {\n const observablePromise = this.localState.addExportedVariables(\n query,\n variables,\n context,\n ).then(makeObservable);\n\n return new Observable>(observer => {\n let sub: ObservableSubscription | null = null;\n observablePromise.then(\n observable => sub = observable.subscribe(observer),\n observer.error,\n );\n return () => sub && sub.unsubscribe();\n });\n }\n\n return makeObservable(variables);\n }\n\n public stopQuery(queryId: string) {\n this.stopQueryNoBroadcast(queryId);\n this.broadcastQueries();\n }\n\n private stopQueryNoBroadcast(queryId: string) {\n this.stopQueryInStoreNoBroadcast(queryId);\n this.removeQuery(queryId);\n }\n\n public removeQuery(queryId: string) {\n // teardown all links\n // Both `QueryManager.fetchRequest` and `QueryManager.query` create separate promises\n // that each add their reject functions to fetchCancelFns.\n // A query created with `QueryManager.query()` could trigger a `QueryManager.fetchRequest`.\n // The same queryId could have two 
rejection fns for two promises\n this.fetchCancelFns.delete(queryId);\n if (this.queries.has(queryId)) {\n this.getQuery(queryId).stop();\n this.queries.delete(queryId);\n }\n }\n\n public broadcastQueries() {\n if (this.onBroadcast) this.onBroadcast();\n this.queries.forEach(info => info.notify());\n }\n\n public getLocalState(): LocalState {\n return this.localState;\n }\n\n private inFlightLinkObservables = new Map<\n DocumentNode,\n Map>\n >();\n\n private getObservableFromLink(\n query: DocumentNode,\n context: any,\n variables?: OperationVariables,\n deduplication: boolean =\n // Prefer context.queryDeduplication if specified.\n context?.queryDeduplication ??\n this.queryDeduplication,\n ): Observable> {\n let observable: Observable>;\n\n const { serverQuery } = this.transform(query);\n if (serverQuery) {\n const { inFlightLinkObservables, link } = this;\n\n const operation = {\n query: serverQuery,\n variables,\n operationName: getOperationName(serverQuery) || void 0,\n context: this.prepareContext({\n ...context,\n forceFetch: !deduplication\n }),\n };\n\n context = operation.context;\n\n if (deduplication) {\n const byVariables = inFlightLinkObservables.get(serverQuery) || new Map();\n inFlightLinkObservables.set(serverQuery, byVariables);\n\n const varJson = canonicalStringify(variables);\n observable = byVariables.get(varJson);\n\n if (!observable) {\n const concast = new Concast([\n execute(link, operation) as Observable>\n ]);\n\n byVariables.set(varJson, observable = concast);\n\n concast.beforeNext(() => {\n if (byVariables.delete(varJson) &&\n byVariables.size < 1) {\n inFlightLinkObservables.delete(serverQuery);\n }\n });\n }\n\n } else {\n observable = new Concast([\n execute(link, operation) as Observable>\n ]);\n }\n } else {\n observable = new Concast([\n Observable.of({ data: {} } as FetchResult)\n ]);\n context = this.prepareContext(context);\n }\n\n const { clientQuery } = this.transform(query);\n if (clientQuery) {\n observable = asyncMap(observable, result => {\n return this.localState.runResolvers({\n document: clientQuery,\n remoteResult: result,\n context,\n variables,\n });\n });\n }\n\n return observable;\n }\n\n private getResultsFromLink(\n queryInfo: QueryInfo,\n cacheWriteBehavior: CacheWriteBehavior,\n options: Pick,\n | \"variables\"\n | \"context\"\n | \"fetchPolicy\"\n | \"errorPolicy\">,\n ): Observable> {\n const requestId = queryInfo.lastRequestId = this.generateRequestId();\n\n // Make sure we write the result below using the same options we were given,\n // even though the input object may have been modified in the meantime.\n options = cloneDeep(options);\n\n // Performing transformForLink here gives this.cache a chance to fill in\n // missing fragment definitions (for example) before sending this document\n // through the link chain.\n const linkDocument = this.cache.transformForLink(\n // Use same document originally produced by this.cache.transformDocument.\n this.transform(queryInfo.document!).document\n );\n\n return asyncMap(\n this.getObservableFromLink(\n linkDocument,\n options.context,\n options.variables,\n ),\n\n result => {\n const graphQLErrors = isNonEmptyArray(result.errors)\n ? 
result.errors.slice(0)\n : [];\n\n if ('incremental' in result && isNonEmptyArray(result.incremental)) {\n result.incremental.forEach(incrementalResult => {\n if (incrementalResult.errors) {\n graphQLErrors.push(...incrementalResult.errors);\n }\n });\n }\n\n const hasErrors = isNonEmptyArray(graphQLErrors);\n\n // If we interrupted this request by calling getResultsFromLink again\n // with the same QueryInfo object, we ignore the old results.\n if (requestId >= queryInfo.lastRequestId) {\n if (hasErrors && options.errorPolicy === \"none\") {\n // Throwing here effectively calls observer.error.\n throw queryInfo.markError(new ApolloError({\n graphQLErrors,\n }));\n }\n // Use linkDocument rather than queryInfo.document so the\n // operation/fragments used to write the result are the same as the\n // ones used to obtain it from the link.\n queryInfo.markResult(result, linkDocument, options, cacheWriteBehavior);\n queryInfo.markReady();\n }\n\n const aqr: ApolloQueryResult = {\n data: result.data,\n loading: false,\n networkStatus: NetworkStatus.ready,\n };\n\n if (hasErrors && options.errorPolicy !== \"ignore\") {\n aqr.errors = graphQLErrors;\n aqr.networkStatus = NetworkStatus.error;\n }\n\n return aqr;\n },\n\n networkError => {\n const error = isApolloError(networkError)\n ? networkError\n : new ApolloError({ networkError });\n\n // Avoid storing errors from older interrupted queries.\n if (requestId >= queryInfo.lastRequestId) {\n queryInfo.markError(error);\n }\n\n throw error;\n },\n );\n }\n\n public fetchQueryObservable(\n queryId: string,\n options: WatchQueryOptions,\n // The initial networkStatus for this fetch, most often\n // NetworkStatus.loading, but also possibly fetchMore, poll, refetch,\n // or setVariables.\n networkStatus = NetworkStatus.loading,\n ): Concast> {\n const query = this.transform(options.query).document;\n const variables = this.getVariables(query, options.variables) as TVars;\n const queryInfo = this.getQuery(queryId);\n\n const defaults = this.defaultOptions.watchQuery;\n let {\n fetchPolicy = defaults && defaults.fetchPolicy || \"cache-first\",\n errorPolicy = defaults && defaults.errorPolicy || \"none\",\n returnPartialData = false,\n notifyOnNetworkStatusChange = false,\n context = {},\n } = options;\n\n const normalized = Object.assign({}, options, {\n query,\n variables,\n fetchPolicy,\n errorPolicy,\n returnPartialData,\n notifyOnNetworkStatusChange,\n context,\n });\n\n const fromVariables = (variables: TVars) => {\n // Since normalized is always a fresh copy of options, it's safe to\n // modify its properties here, rather than creating yet another new\n // WatchQueryOptions object.\n normalized.variables = variables;\n\n const concastSources = this.fetchQueryByPolicy(\n queryInfo,\n normalized,\n networkStatus,\n );\n\n if (\n // If we're in standby, postpone advancing options.fetchPolicy using\n // applyNextFetchPolicy.\n normalized.fetchPolicy !== \"standby\" &&\n // The \"standby\" policy currently returns [] from fetchQueryByPolicy, so\n // this is another way to detect when nothing was done/fetched.\n concastSources.length > 0 &&\n queryInfo.observableQuery\n ) {\n queryInfo.observableQuery[\"applyNextFetchPolicy\"](\"after-fetch\", options);\n }\n\n return concastSources;\n };\n\n // This cancel function needs to be set before the concast is created,\n // in case concast creation synchronously cancels the request.\n const cleanupCancelFn = () => this.fetchCancelFns.delete(queryId);\n this.fetchCancelFns.set(queryId, reason => {\n 
cleanupCancelFn();\n // This delay ensures the concast variable has been initialized.\n setTimeout(() => concast.cancel(reason));\n });\n\n // A Concast can be created either from an Iterable>\n // or from a PromiseLike>>, where T in this\n // case is ApolloQueryResult.\n const concast = new Concast(\n // If the query has @export(as: ...) directives, then we need to\n // process those directives asynchronously. When there are no\n // @export directives (the common case), we deliberately avoid\n // wrapping the result of this.fetchQueryByPolicy in a Promise,\n // since the timing of result delivery is (unfortunately) important\n // for backwards compatibility. TODO This code could be simpler if\n // we deprecated and removed LocalState.\n this.transform(normalized.query).hasClientExports\n ? this.localState.addExportedVariables(\n normalized.query,\n normalized.variables,\n normalized.context,\n ).then(fromVariables)\n : fromVariables(normalized.variables!)\n );\n\n concast.promise.then(cleanupCancelFn, cleanupCancelFn);\n\n return concast;\n }\n\n public refetchQueries({\n updateCache,\n include,\n optimistic = false,\n removeOptimistic = optimistic ? makeUniqueId(\"refetchQueries\") : void 0,\n onQueryUpdated,\n }: InternalRefetchQueriesOptions, TResult>\n ): InternalRefetchQueriesMap {\n const includedQueriesById = new Map;\n lastDiff?: Cache.DiffResult;\n diff?: Cache.DiffResult;\n }>();\n\n if (include) {\n this.getObservableQueries(include).forEach((oq, queryId) => {\n includedQueriesById.set(queryId, {\n oq,\n lastDiff: this.getQuery(queryId).getDiff(),\n });\n });\n }\n\n const results: InternalRefetchQueriesMap = new Map;\n\n if (updateCache) {\n this.cache.batch({\n update: updateCache,\n\n // Since you can perform any combination of cache reads and/or writes in\n // the cache.batch update function, its optimistic option can be either\n // a boolean or a string, representing three distinct modes of\n // operation:\n //\n // * false: read/write only the root layer\n // * true: read/write the topmost layer\n // * string: read/write a fresh optimistic layer with that ID string\n //\n // When typeof optimistic === \"string\", a new optimistic layer will be\n // temporarily created within cache.batch with that string as its ID. If\n // we then pass that same string as the removeOptimistic option, we can\n // make cache.batch immediately remove the optimistic layer after\n // running the updateCache function, triggering only one broadcast.\n //\n // However, the refetchQueries method accepts only true or false for its\n // optimistic option (not string). We interpret true to mean a temporary\n // optimistic layer should be created, to allow efficiently rolling back\n // the effect of the updateCache function, which involves passing a\n // string instead of true as the optimistic option to cache.batch, when\n // refetchQueries receives optimistic: true.\n //\n // In other words, we are deliberately not supporting the use case of\n // writing to an *existing* optimistic layer (using the refetchQueries\n // updateCache function), since that would potentially interfere with\n // other optimistic updates in progress. 
Instead, you can read/write\n // only the root layer by passing optimistic: false to refetchQueries,\n // or you can read/write a brand new optimistic layer that will be\n // automatically removed by passing optimistic: true.\n optimistic: optimistic && removeOptimistic || false,\n\n // The removeOptimistic option can also be provided by itself, even if\n // optimistic === false, to remove some previously-added optimistic\n // layer safely and efficiently, like we do in markMutationResult.\n //\n // If an explicit removeOptimistic string is provided with optimistic:\n // true, the removeOptimistic string will determine the ID of the\n // temporary optimistic layer, in case that ever matters.\n removeOptimistic,\n\n onWatchUpdated(watch, diff, lastDiff) {\n const oq =\n watch.watcher instanceof QueryInfo &&\n watch.watcher.observableQuery;\n\n if (oq) {\n if (onQueryUpdated) {\n // Since we're about to handle this query now, remove it from\n // includedQueriesById, in case it was added earlier because of\n // options.include.\n includedQueriesById.delete(oq.queryId);\n\n let result: TResult | boolean | Promise> =\n onQueryUpdated(oq, diff, lastDiff);\n\n if (result === true) {\n // The onQueryUpdated function requested the default refetching\n // behavior by returning true.\n result = oq.refetch();\n }\n\n // Record the result in the results Map, as long as onQueryUpdated\n // did not return false to skip/ignore this result.\n if (result !== false) {\n results.set(oq, result as InternalRefetchQueriesResult);\n }\n\n // Allow the default cache broadcast to happen, except when\n // onQueryUpdated returns false.\n return result;\n }\n\n if (onQueryUpdated !== null) {\n // If we don't have an onQueryUpdated function, and onQueryUpdated\n // was not disabled by passing null, make sure this query is\n // \"included\" like any other options.include-specified query.\n includedQueriesById.set(oq.queryId, { oq, lastDiff, diff });\n }\n }\n },\n });\n }\n\n if (includedQueriesById.size) {\n includedQueriesById.forEach(({ oq, lastDiff, diff }, queryId) => {\n let result: TResult | boolean | Promise> | undefined;\n\n // If onQueryUpdated is provided, we want to use it for all included\n // queries, even the QueryOptions ones.\n if (onQueryUpdated) {\n if (!diff) {\n const info = oq[\"queryInfo\"];\n info.reset(); // Force info.getDiff() to read from cache.\n diff = info.getDiff();\n }\n result = onQueryUpdated(oq, diff, lastDiff);\n }\n\n // Otherwise, we fall back to refetching.\n if (!onQueryUpdated || result === true) {\n result = oq.refetch();\n }\n\n if (result !== false) {\n results.set(oq, result as InternalRefetchQueriesResult);\n }\n\n if (queryId.indexOf(\"legacyOneTimeQuery\") >= 0) {\n this.stopQueryNoBroadcast(queryId);\n }\n });\n }\n\n if (removeOptimistic) {\n // In case no updateCache callback was provided (so cache.batch was not\n // called above, and thus did not already remove the optimistic layer),\n // remove it here. 
Since this is a no-op when the layer has already been\n // removed, we do it even if we called cache.batch above, since it's\n // possible this.cache is an instance of some ApolloCache subclass other\n // than InMemoryCache, and does not fully support the removeOptimistic\n // option for cache.batch.\n this.cache.removeOptimistic(removeOptimistic);\n }\n\n return results;\n }\n\n private fetchQueryByPolicy(\n queryInfo: QueryInfo,\n { query,\n variables,\n fetchPolicy,\n refetchWritePolicy,\n errorPolicy,\n returnPartialData,\n context,\n notifyOnNetworkStatusChange,\n }: WatchQueryOptions,\n // The initial networkStatus for this fetch, most often\n // NetworkStatus.loading, but also possibly fetchMore, poll, refetch,\n // or setVariables.\n networkStatus: NetworkStatus,\n ): ConcastSourcesArray> {\n const oldNetworkStatus = queryInfo.networkStatus;\n\n queryInfo.init({\n document: this.transform(query).document,\n variables,\n networkStatus,\n });\n\n const readCache = () => queryInfo.getDiff(variables);\n\n const resultsFromCache = (\n diff: Cache.DiffResult,\n networkStatus = queryInfo.networkStatus || NetworkStatus.loading,\n ) => {\n const data = diff.result;\n\n if (__DEV__ &&\n !returnPartialData &&\n !equal(data, {})) {\n logMissingFieldErrors(diff.missing);\n }\n\n const fromData = (data: TData | undefined) => Observable.of({\n data,\n loading: isNetworkRequestInFlight(networkStatus),\n networkStatus,\n ...(diff.complete ? null : { partial: true }),\n } as ApolloQueryResult);\n\n if (data && this.transform(query).hasForcedResolvers) {\n return this.localState.runResolvers({\n document: query,\n remoteResult: { data },\n context,\n variables,\n onlyRunForcedResolvers: true,\n }).then(resolved => fromData(resolved.data || void 0));\n }\n\n return fromData(data);\n };\n\n const cacheWriteBehavior =\n fetchPolicy === \"no-cache\" ? CacheWriteBehavior.FORBID :\n ( // Watched queries must opt into overwriting existing data on refetch,\n // by passing refetchWritePolicy: \"overwrite\" in their WatchQueryOptions.\n networkStatus === NetworkStatus.refetch &&\n refetchWritePolicy !== \"merge\"\n ) ? 
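// --- Editorial sketch (not part of the bundled source): the refetchQueries
// --- options described in the comments above, as used through
// --- client.refetchQueries. The evicted field name is an assumption.
const refetched = client.refetchQueries({
  updateCache(cache) {
    // Runs inside cache.batch; with optimistic: true these writes land in a
    // temporary optimistic layer that is removed immediately afterwards.
    cache.evict({ fieldName: 'activeTodos' });
  },
  optimistic: true,
  onQueryUpdated(observableQuery, diff, lastDiff) {
    // Return true to refetch, false to skip, or any custom result to record.
    return diff.complete ? false : observableQuery.refetch();
  },
});

// The returned promise also exposes the affected queries and raw results
// immediately, without awaiting Promise.all.
refetched.then(results => console.log(results.length));
console.log(refetched.queries.map(oq => oq.queryName));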
CacheWriteBehavior.OVERWRITE\n : CacheWriteBehavior.MERGE;\n\n const resultsFromLink = () => this.getResultsFromLink(\n queryInfo,\n cacheWriteBehavior,\n {\n variables,\n context,\n fetchPolicy,\n errorPolicy,\n },\n );\n\n const shouldNotify =\n notifyOnNetworkStatusChange &&\n typeof oldNetworkStatus === \"number\" &&\n oldNetworkStatus !== networkStatus &&\n isNetworkRequestInFlight(networkStatus);\n\n switch (fetchPolicy) {\n default: case \"cache-first\": {\n const diff = readCache();\n\n if (diff.complete) {\n return [\n resultsFromCache(diff, queryInfo.markReady()),\n ];\n }\n\n if (returnPartialData || shouldNotify) {\n return [\n resultsFromCache(diff),\n resultsFromLink(),\n ];\n }\n\n return [\n resultsFromLink(),\n ];\n }\n\n case \"cache-and-network\": {\n const diff = readCache();\n\n if (diff.complete || returnPartialData || shouldNotify) {\n return [\n resultsFromCache(diff),\n resultsFromLink(),\n ];\n }\n\n return [\n resultsFromLink(),\n ];\n }\n\n case \"cache-only\":\n return [\n resultsFromCache(readCache(), queryInfo.markReady()),\n ];\n\n case \"network-only\":\n if (shouldNotify) {\n return [\n resultsFromCache(readCache()),\n resultsFromLink(),\n ];\n }\n\n return [resultsFromLink()];\n\n case \"no-cache\":\n if (shouldNotify) {\n return [\n // Note that queryInfo.getDiff() for no-cache queries does not call\n // cache.diff, but instead returns a { complete: false } stub result\n // when there is no queryInfo.diff already defined.\n resultsFromCache(queryInfo.getDiff()),\n resultsFromLink(),\n ];\n }\n\n return [resultsFromLink()];\n\n case \"standby\":\n return [];\n }\n }\n\n private getQuery(queryId: string): QueryInfo {\n if (queryId && !this.queries.has(queryId)) {\n this.queries.set(queryId, new QueryInfo(this, queryId));\n }\n return this.queries.get(queryId)!;\n }\n\n private prepareContext(context = {}) {\n const newContext = this.localState.prepareContext(context);\n return {\n ...newContext,\n clientAwareness: this.clientAwareness,\n };\n }\n}\n","import { ExecutionPatchIncrementalResult } from '../../link/core';\n\nexport function isExecutionPatchIncrementalResult(value: any): value is ExecutionPatchIncrementalResult {\n return !!(value as ExecutionPatchIncrementalResult).incremental;\n}\n","import { invariant, InvariantError } from '../utilities/globals';\n\nimport { ExecutionResult, DocumentNode } from 'graphql';\n\nimport { ApolloLink, FetchResult, GraphQLRequest, execute } from '../link/core';\nimport { ApolloCache, DataProxy } from '../cache';\nimport { Observable } from '../utilities';\nimport { version } from '../version';\nimport { HttpLink, UriFunction } from '../link/http';\n\nimport { QueryManager } from './QueryManager';\nimport { ObservableQuery } from './ObservableQuery';\n\nimport {\n ApolloQueryResult,\n DefaultContext,\n OperationVariables,\n Resolvers,\n RefetchQueriesOptions,\n RefetchQueriesResult,\n InternalRefetchQueriesResult,\n RefetchQueriesInclude,\n} from './types';\n\nimport {\n QueryOptions,\n WatchQueryOptions,\n MutationOptions,\n SubscriptionOptions,\n WatchQueryFetchPolicy,\n} from './watchQueryOptions';\n\nimport {\n LocalState,\n FragmentMatcher,\n} from './LocalState';\n\nexport interface DefaultOptions {\n watchQuery?: Partial>;\n query?: Partial>;\n mutate?: Partial>;\n}\n\nlet hasSuggestedDevtools = false;\n\nexport type ApolloClientOptions = {\n uri?: string | UriFunction;\n credentials?: string;\n headers?: Record;\n link?: ApolloLink;\n cache: ApolloCache;\n ssrForceFetchDelay?: number;\n ssrMode?: boolean;\n 
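// --- Editorial sketch (not part of the bundled source): what the fetchPolicy
// --- switch above means for a watched query, reusing the assumed client and
// --- query document from the earlier sketch.
const observable = client.watchQuery({
  query: VIEWER,
  // "cache-and-network" delivers a cache result first (marked partial when
  // incomplete and returnPartialData is set), then the link result.
  fetchPolicy: 'cache-and-network',
  returnPartialData: true,
  notifyOnNetworkStatusChange: true,
});

observable.subscribe(result => {
  // Early deliveries may report loading: true with an in-flight networkStatus;
  // the final link result arrives with networkStatus === NetworkStatus.ready.
  console.log(result.loading, result.networkStatus, result.partial, result.data);
});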
connectToDevTools?: boolean;\n queryDeduplication?: boolean;\n defaultOptions?: DefaultOptions;\n assumeImmutableResults?: boolean;\n resolvers?: Resolvers | Resolvers[];\n typeDefs?: string | string[] | DocumentNode | DocumentNode[];\n fragmentMatcher?: FragmentMatcher;\n name?: string;\n version?: string;\n};\n\n// Though mergeOptions now resides in @apollo/client/utilities, it was\n// previously declared and exported from this module, and then reexported from\n// @apollo/client/core. Since we need to preserve that API anyway, the easiest\n// solution is to reexport mergeOptions where it was previously declared (here).\nimport { mergeOptions } from \"../utilities\";\nexport { mergeOptions }\n\n/**\n * This is the primary Apollo Client class. It is used to send GraphQL documents (i.e. queries\n * and mutations) to a GraphQL spec-compliant server over a {@link NetworkInterface} instance,\n * receive results from the server and cache the results in a store. It also delivers updates\n * to GraphQL queries through {@link Observable} instances.\n */\nexport class ApolloClient implements DataProxy {\n public link: ApolloLink;\n public cache: ApolloCache;\n public disableNetworkFetches: boolean;\n public version: string;\n public queryDeduplication: boolean;\n public defaultOptions: DefaultOptions;\n public readonly typeDefs: ApolloClientOptions['typeDefs'];\n\n private queryManager: QueryManager;\n private devToolsHookCb: Function;\n private resetStoreCallbacks: Array<() => Promise> = [];\n private clearStoreCallbacks: Array<() => Promise> = [];\n private localState: LocalState;\n\n /**\n * Constructs an instance of {@link ApolloClient}.\n *\n * @param uri The GraphQL endpoint that Apollo Client will connect to. If\n * `link` is configured, this option is ignored.\n * @param link The {@link ApolloLink} over which GraphQL documents will be resolved into a response.\n *\n * @param cache The initial cache to use in the data store.\n *\n * @param ssrMode Determines whether this is being run in Server Side Rendering (SSR) mode.\n *\n * @param ssrForceFetchDelay Determines the time interval before we force fetch queries for a\n * server side render.\n *\n * @param queryDeduplication If set to false, a query will still be sent to the server even if a query\n * with identical parameters (query, variables, operationName) is already in flight.\n *\n * @param defaultOptions Used to set application wide defaults for the\n * options supplied to `watchQuery`, `query`, or\n * `mutate`.\n *\n * @param assumeImmutableResults When this option is true, the client will assume results\n * read from the cache are never mutated by application code,\n * which enables substantial performance optimizations.\n *\n * @param name A custom name that can be used to identify this client, when\n * using Apollo client awareness features. E.g. \"iOS\".\n *\n * @param version A custom version that can be used to identify this client,\n * when using Apollo client awareness features. This is the\n * version of your client, which you may want to increment on\n * new builds. 
This is NOT the version of Apollo Client that\n * you are using.\n */\n constructor(options: ApolloClientOptions) {\n const {\n uri,\n credentials,\n headers,\n cache,\n ssrMode = false,\n ssrForceFetchDelay = 0,\n connectToDevTools =\n // Expose the client instance as window.__APOLLO_CLIENT__ and call\n // onBroadcast in queryManager.broadcastQueries to enable browser\n // devtools, but disable them by default in production.\n typeof window === 'object' &&\n !(window as any).__APOLLO_CLIENT__ &&\n __DEV__,\n queryDeduplication = true,\n defaultOptions,\n assumeImmutableResults = false,\n resolvers,\n typeDefs,\n fragmentMatcher,\n name: clientAwarenessName,\n version: clientAwarenessVersion,\n } = options;\n\n let { link } = options;\n\n if (!link) {\n link = uri\n ? new HttpLink({ uri, credentials, headers })\n : ApolloLink.empty();\n }\n\n if (!cache) {\n throw new InvariantError(\n \"To initialize Apollo Client, you must specify a 'cache' property \" +\n \"in the options object. \\n\" +\n \"For more information, please visit: https://go.apollo.dev/c/docs\"\n );\n }\n\n this.link = link;\n this.cache = cache;\n this.disableNetworkFetches = ssrMode || ssrForceFetchDelay > 0;\n this.queryDeduplication = queryDeduplication;\n this.defaultOptions = defaultOptions || Object.create(null);\n this.typeDefs = typeDefs;\n\n if (ssrForceFetchDelay) {\n setTimeout(\n () => (this.disableNetworkFetches = false),\n ssrForceFetchDelay,\n );\n }\n\n this.watchQuery = this.watchQuery.bind(this);\n this.query = this.query.bind(this);\n this.mutate = this.mutate.bind(this);\n this.resetStore = this.resetStore.bind(this);\n this.reFetchObservableQueries = this.reFetchObservableQueries.bind(this);\n\n if (connectToDevTools && typeof window === 'object') {\n (window as any).__APOLLO_CLIENT__ = this;\n }\n\n /**\n * Suggest installing the devtools for developers who don't have them\n */\n if (!hasSuggestedDevtools && __DEV__) {\n hasSuggestedDevtools = true;\n if (\n typeof window !== 'undefined' &&\n window.document &&\n window.top === window.self &&\n !(window as any).__APOLLO_DEVTOOLS_GLOBAL_HOOK__\n ) {\n const nav = window.navigator;\n const ua = nav && nav.userAgent;\n let url: string | undefined;\n if (typeof ua === \"string\") {\n if (ua.indexOf(\"Chrome/\") > -1) {\n url = \"https://chrome.google.com/webstore/detail/\" +\n \"apollo-client-developer-t/jdkknkkbebbapilgoeccciglkfbmbnfm\";\n } else if (ua.indexOf(\"Firefox/\") > -1) {\n url = \"https://addons.mozilla.org/en-US/firefox/addon/apollo-developer-tools/\";\n }\n }\n if (url) {\n invariant.log(\n \"Download the Apollo DevTools for a better development \" +\n \"experience: \" + url\n );\n }\n }\n }\n\n this.version = version;\n\n this.localState = new LocalState({\n cache,\n client: this,\n resolvers,\n fragmentMatcher,\n });\n\n this.queryManager = new QueryManager({\n cache: this.cache,\n link: this.link,\n defaultOptions: this.defaultOptions,\n queryDeduplication,\n ssrMode,\n clientAwareness: {\n name: clientAwarenessName!,\n version: clientAwarenessVersion!,\n },\n localState: this.localState,\n assumeImmutableResults,\n onBroadcast: connectToDevTools ? 
() => {\n if (this.devToolsHookCb) {\n this.devToolsHookCb({\n action: {},\n state: {\n queries: this.queryManager.getQueryStore(),\n mutations: this.queryManager.mutationStore || {},\n },\n dataWithOptimisticResults: this.cache.extract(true),\n });\n }\n } : void 0,\n });\n }\n\n /**\n * Call this method to terminate any active client processes, making it safe\n * to dispose of this `ApolloClient` instance.\n */\n public stop() {\n this.queryManager.stop();\n }\n\n /**\n * This watches the cache store of the query according to the options specified and\n * returns an {@link ObservableQuery}. We can subscribe to this {@link ObservableQuery} and\n * receive updated results through a GraphQL observer when the cache store changes.\n *\n * Note that this method is not an implementation of GraphQL subscriptions. Rather,\n * it uses Apollo's store in order to reactively deliver updates to your query results.\n *\n * For example, suppose you call watchQuery on a GraphQL query that fetches a person's\n * first and last name and this person has a particular object identifier, provided by\n * dataIdFromObject. Later, a different query fetches that same person's\n * first and last name and the first name has now changed. Then, any observers associated\n * with the results of the first query will be updated with a new result object.\n *\n * Note that if the cache does not change, the subscriber will *not* be notified.\n *\n * See [here](https://medium.com/apollo-stack/the-concepts-of-graphql-bc68bd819be3#.3mb0cbcmc) for\n * a description of store reactivity.\n */\n public watchQuery(\n options: WatchQueryOptions,\n ): ObservableQuery {\n if (this.defaultOptions.watchQuery) {\n options = mergeOptions(this.defaultOptions.watchQuery, options);\n }\n\n // XXX Overwriting options is probably not the best way to do this long term...\n if (\n this.disableNetworkFetches &&\n (options.fetchPolicy === 'network-only' ||\n options.fetchPolicy === 'cache-and-network')\n ) {\n options = { ...options, fetchPolicy: 'cache-first' };\n }\n\n return this.queryManager.watchQuery(options);\n }\n\n /**\n * This resolves a single query according to the options specified and\n * returns a {@link Promise} which is either resolved with the resulting data\n * or rejected with an error.\n *\n * @param options An object of type {@link QueryOptions} that allows us to\n * describe how this query should be treated e.g. whether it should hit the\n * server at all or just resolve from the cache, etc.\n */\n public query(\n options: QueryOptions,\n ): Promise> {\n if (this.defaultOptions.query) {\n options = mergeOptions(this.defaultOptions.query, options);\n }\n\n invariant(\n (options.fetchPolicy as WatchQueryFetchPolicy) !== 'cache-and-network',\n 'The cache-and-network fetchPolicy does not work with client.query, because ' +\n 'client.query can only return a single result. 
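// --- Editorial sketch (not part of the bundled source): the cache reactivity
// --- described in the watchQuery docs above. PERSON_QUERY, the Person type,
// --- and the entity id are assumptions for illustration.
const subscription = client
  .watchQuery({ query: PERSON_QUERY, variables: { id: 1 } })
  .subscribe(({ data }) => console.log(data?.person?.firstName));

// Any later cache write that changes Person:1, whether from another query's
// result or a manual write like this one, re-notifies the observer above.
client.writeFragment({
  id: 'Person:1',
  fragment: gql`fragment PersonName on Person { firstName }`,
  data: { __typename: 'Person', firstName: 'Ada' },
});

subscription.unsubscribe();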
Please use client.watchQuery ' +\n 'to receive multiple results from the cache and the network, or consider ' +\n 'using a different fetchPolicy, such as cache-first or network-only.'\n );\n\n if (this.disableNetworkFetches && options.fetchPolicy === 'network-only') {\n options = { ...options, fetchPolicy: 'cache-first' };\n }\n\n return this.queryManager.query(options);\n }\n\n /**\n * This resolves a single mutation according to the options specified and returns a\n * {@link Promise} which is either resolved with the resulting data or rejected with an\n * error.\n *\n * It takes options as an object with the following keys and values:\n */\n public mutate<\n TData = any,\n TVariables = OperationVariables,\n TContext = DefaultContext,\n TCache extends ApolloCache = ApolloCache\n >(\n options: MutationOptions,\n ): Promise> {\n if (this.defaultOptions.mutate) {\n options = mergeOptions(this.defaultOptions.mutate, options);\n }\n return this.queryManager.mutate(options);\n }\n\n /**\n * This subscribes to a graphql subscription according to the options specified and returns an\n * {@link Observable} which either emits received data or an error.\n */\n public subscribe(\n options: SubscriptionOptions,\n ): Observable> {\n return this.queryManager.startGraphQLSubscription(options);\n }\n\n /**\n * Tries to read some data from the store in the shape of the provided\n * GraphQL query without making a network request. This method will start at\n * the root query. To start at a specific id returned by `dataIdFromObject`\n * use `readFragment`.\n *\n * @param optimistic Set to `true` to allow `readQuery` to return\n * optimistic results. Is `false` by default.\n */\n public readQuery(\n options: DataProxy.Query,\n optimistic: boolean = false,\n ): T | null {\n return this.cache.readQuery(options, optimistic);\n }\n\n /**\n * Tries to read some data from the store in the shape of the provided\n * GraphQL fragment without making a network request. This method will read a\n * GraphQL fragment from any arbitrary id that is currently cached, unlike\n * `readQuery` which will only read from the root query.\n *\n * You must pass in a GraphQL document with a single fragment or a document\n * with multiple fragments that represent what you are reading. If you pass\n * in a document with multiple fragments then you must also specify a\n * `fragmentName`.\n *\n * @param optimistic Set to `true` to allow `readFragment` to return\n * optimistic results. Is `false` by default.\n */\n public readFragment(\n options: DataProxy.Fragment,\n optimistic: boolean = false,\n ): T | null {\n return this.cache.readFragment(options, optimistic);\n }\n\n /**\n * Writes some data in the shape of the provided GraphQL query directly to\n * the store. This method will start at the root query. To start at a\n * specific id returned by `dataIdFromObject` then use `writeFragment`.\n */\n public writeQuery(\n options: DataProxy.WriteQueryOptions,\n ): void {\n this.cache.writeQuery(options);\n this.queryManager.broadcastQueries();\n }\n\n /**\n * Writes some data in the shape of the provided GraphQL fragment directly to\n * the store. This method will write to a GraphQL fragment from any arbitrary\n * id that is currently cached, unlike `writeQuery` which will only write\n * from the root query.\n *\n * You must pass in a GraphQL document with a single fragment or a document\n * with multiple fragments that represent what you are writing. 
If you pass\n * in a document with multiple fragments then you must also specify a\n * `fragmentName`.\n */\n public writeFragment(\n options: DataProxy.WriteFragmentOptions,\n ): void {\n this.cache.writeFragment(options);\n this.queryManager.broadcastQueries();\n }\n\n public __actionHookForDevTools(cb: () => any) {\n this.devToolsHookCb = cb;\n }\n\n public __requestRaw(payload: GraphQLRequest): Observable {\n return execute(this.link, payload);\n }\n\n /**\n * Resets your entire store by clearing out your cache and then re-executing\n * all of your active queries. This makes it so that you may guarantee that\n * there is no data left in your store from a time before you called this\n * method.\n *\n * `resetStore()` is useful when your user just logged out. You’ve removed the\n * user session, and you now want to make sure that any references to data you\n * might have fetched while the user session was active is gone.\n *\n * It is important to remember that `resetStore()` *will* refetch any active\n * queries. This means that any components that might be mounted will execute\n * their queries again using your network interface. If you do not want to\n * re-execute any queries then you should make sure to stop watching any\n * active queries.\n */\n public resetStore(): Promise[] | null> {\n return Promise.resolve()\n .then(() => this.queryManager.clearStore({\n discardWatches: false,\n }))\n .then(() => Promise.all(this.resetStoreCallbacks.map(fn => fn())))\n .then(() => this.reFetchObservableQueries());\n }\n\n /**\n * Remove all data from the store. Unlike `resetStore`, `clearStore` will\n * not refetch any active queries.\n */\n public clearStore(): Promise {\n return Promise.resolve()\n .then(() => this.queryManager.clearStore({\n discardWatches: true,\n }))\n .then(() => Promise.all(this.clearStoreCallbacks.map(fn => fn())));\n }\n\n /**\n * Allows callbacks to be registered that are executed when the store is\n * reset. `onResetStore` returns an unsubscribe function that can be used\n * to remove registered callbacks.\n */\n public onResetStore(cb: () => Promise): () => void {\n this.resetStoreCallbacks.push(cb);\n return () => {\n this.resetStoreCallbacks = this.resetStoreCallbacks.filter(c => c !== cb);\n };\n }\n\n /**\n * Allows callbacks to be registered that are executed when the store is\n * cleared. `onClearStore` returns an unsubscribe function that can be used\n * to remove registered callbacks.\n */\n public onClearStore(cb: () => Promise): () => void {\n this.clearStoreCallbacks.push(cb);\n return () => {\n this.clearStoreCallbacks = this.clearStoreCallbacks.filter(c => c !== cb);\n };\n }\n\n /**\n * Refetches all of your active queries.\n *\n * `reFetchObservableQueries()` is useful if you want to bring the client back to proper state in case of a network outage\n *\n * It is important to remember that `reFetchObservableQueries()` *will* refetch any active\n * queries. This means that any components that might be mounted will execute\n * their queries again using your network interface. If you do not want to\n * re-execute any queries then you should make sure to stop watching any\n * active queries.\n * Takes optional parameter `includeStandby` which will include queries in standby-mode when refetching.\n */\n public reFetchObservableQueries(\n includeStandby?: boolean,\n ): Promise[]> {\n return this.queryManager.reFetchObservableQueries(includeStandby);\n }\n\n /**\n * Refetches specified active queries. 
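// --- Editorial sketch (not part of the bundled source): the resetStore /
// --- clearStore distinction documented above. SETTINGS_QUERY and
// --- defaultSettings are assumptions for illustration.
async function onLogout() {
  // clearStore wipes the cache but does NOT refetch active queries.
  await client.clearStore();
}

const unregister = client.onResetStore(async () => {
  // Runs after the store is cleared, before active queries are refetched.
  client.writeQuery({ query: SETTINGS_QUERY, data: defaultSettings });
});

async function onLogin() {
  // resetStore wipes the cache, runs the onResetStore callbacks, then
  // refetches every active query over the network.
  await client.resetStore();
}

unregister(); // remove the callback when it is no longer needed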
Similar to \"reFetchObservableQueries()\" but with a specific list of queries.\n *\n * `refetchQueries()` is useful for use cases to imperatively refresh a selection of queries.\n *\n * It is important to remember that `refetchQueries()` *will* refetch specified active\n * queries. This means that any components that might be mounted will execute\n * their queries again using your network interface. If you do not want to\n * re-execute any queries then you should make sure to stop watching any\n * active queries.\n */\n public refetchQueries<\n TCache extends ApolloCache = ApolloCache,\n TResult = Promise>,\n >(\n options: RefetchQueriesOptions,\n ): RefetchQueriesResult {\n const map = this.queryManager.refetchQueries(options);\n const queries: ObservableQuery[] = [];\n const results: InternalRefetchQueriesResult[] = [];\n\n map.forEach((result, obsQuery) => {\n queries.push(obsQuery);\n results.push(result);\n });\n\n const result = Promise.all(\n results as TResult[]\n ) as RefetchQueriesResult;\n\n // In case you need the raw results immediately, without awaiting\n // Promise.all(results):\n result.queries = queries;\n result.results = results;\n\n // If you decide to ignore the result Promise because you're using\n // result.queries and result.results instead, you shouldn't have to worry\n // about preventing uncaught rejections for the Promise.all result.\n result.catch(error => {\n invariant.debug(`In client.refetchQueries, Promise.all promise rejected with error ${error}`);\n });\n\n return result;\n }\n\n /**\n * Get all currently active `ObservableQuery` objects, in a `Map` keyed by\n * query ID strings. An \"active\" query is one that has observers and a\n * `fetchPolicy` other than \"standby\" or \"cache-only\". You can include all\n * `ObservableQuery` objects (including the inactive ones) by passing \"all\"\n * instead of \"active\", or you can include just a subset of active queries by\n * passing an array of query names or DocumentNode objects.\n */\n public getObservableQueries(\n include: RefetchQueriesInclude = \"active\",\n ): Map> {\n return this.queryManager.getObservableQueries(include);\n }\n\n /**\n * Exposes the cache's complete state, in a serializable format for later restoration.\n */\n public extract(optimistic?: boolean): TCacheShape {\n return this.cache.extract(optimistic);\n }\n\n /**\n * Replaces existing state in the cache (if any) with the values expressed by\n * `serializedState`.\n *\n * Called when hydrating a cache (server side rendering, or offline storage),\n * and also (potentially) during hot reloads.\n */\n public restore(serializedState: TCacheShape): ApolloCache {\n return this.cache.restore(serializedState);\n }\n\n /**\n * Add additional local resolvers.\n */\n public addResolvers(resolvers: Resolvers | Resolvers[]) {\n this.localState.addResolvers(resolvers);\n }\n\n /**\n * Set (override existing) local resolvers.\n */\n public setResolvers(resolvers: Resolvers | Resolvers[]) {\n this.localState.setResolvers(resolvers);\n }\n\n /**\n * Get all registered local resolvers.\n */\n public getResolvers() {\n return this.localState.getResolvers();\n }\n\n /**\n * Set a custom local state fragment matcher.\n */\n public setLocalStateFragmentMatcher(fragmentMatcher: FragmentMatcher) {\n this.localState.setFragmentMatcher(fragmentMatcher);\n }\n\n /**\n * Define a new ApolloLink (or link chain) that Apollo Client will use.\n */\n public setLink(newLink: ApolloLink) {\n this.link = this.queryManager.link = newLink;\n }\n}\n","export const version = 
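// --- Editorial sketch (not part of the bundled source): the extract/restore
// --- pair documented above, as commonly used for SSR hydration. The endpoint
// --- URI is an assumption for illustration.
import { ApolloClient, InMemoryCache, HttpLink } from '@apollo/client';

// Server: after rendering, serialize the normalized cache contents.
const serializedState = client.extract();

// Browser: seed a fresh cache with the serialized state before rendering.
const browserClient = new ApolloClient({
  link: new HttpLink({ uri: 'https://example.com/graphql' }),
  cache: new InMemoryCache().restore(serializedState),
});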
'local';\n","import { DocumentNode } from 'graphql';\nimport { wrap } from 'optimism';\n\nimport {\n StoreObject,\n Reference,\n getFragmentQueryDocument,\n} from '../../utilities';\nimport { DataProxy } from './types/DataProxy';\nimport { Cache } from './types/Cache';\n\nexport type Transaction = (c: ApolloCache) => void;\n\nexport abstract class ApolloCache implements DataProxy {\n // required to implement\n // core API\n public abstract read(\n query: Cache.ReadOptions,\n ): TData | null;\n public abstract write(\n write: Cache.WriteOptions,\n ): Reference | undefined;\n public abstract diff(query: Cache.DiffOptions): Cache.DiffResult;\n public abstract watch(\n watch: Cache.WatchOptions,\n ): () => void;\n\n // Empty the cache and restart all current watches (unless\n // options.discardWatches is true).\n public abstract reset(options?: Cache.ResetOptions): Promise;\n\n // Remove whole objects from the cache by passing just options.id, or\n // specific fields by passing options.field and/or options.args. If no\n // options.args are provided, all fields matching options.field (even\n // those with arguments) will be removed. Returns true iff any data was\n // removed from the cache.\n public abstract evict(options: Cache.EvictOptions): boolean;\n\n // initializer / offline / ssr API\n /**\n * Replaces existing state in the cache (if any) with the values expressed by\n * `serializedState`.\n *\n * Called when hydrating a cache (server side rendering, or offline storage),\n * and also (potentially) during hot reloads.\n */\n public abstract restore(\n serializedState: TSerialized,\n ): ApolloCache;\n\n /**\n * Exposes the cache's complete state, in a serializable format for later restoration.\n */\n public abstract extract(optimistic?: boolean): TSerialized;\n\n // Optimistic API\n\n public abstract removeOptimistic(id: string): void;\n\n // Transactional API\n\n // The batch method is intended to replace/subsume both performTransaction\n // and recordOptimisticTransaction, but performTransaction came first, so we\n // provide a default batch implementation that's just another way of calling\n // performTransaction. Subclasses of ApolloCache (such as InMemoryCache) can\n // override the batch method to do more interesting things with its options.\n public batch(options: Cache.BatchOptions): U {\n const optimisticId =\n typeof options.optimistic === \"string\" ? options.optimistic :\n options.optimistic === false ? null : void 0;\n let updateResult: U;\n this.performTransaction(\n () => updateResult = options.update(this),\n optimisticId,\n );\n return updateResult!;\n }\n\n public abstract performTransaction(\n transaction: Transaction,\n // Although subclasses may implement recordOptimisticTransaction\n // however they choose, the default implementation simply calls\n // performTransaction with a string as the second argument, allowing\n // performTransaction to handle both optimistic and non-optimistic\n // (broadcast-batching) transactions. 
Passing null for optimisticId is\n // also allowed, and indicates that performTransaction should apply\n // the transaction non-optimistically (ignoring optimistic data).\n optimisticId?: string | null,\n ): void;\n\n public recordOptimisticTransaction(\n transaction: Transaction,\n optimisticId: string,\n ) {\n this.performTransaction(transaction, optimisticId);\n }\n\n // Optional API\n\n // Called once per input document, allowing the cache to make static changes\n // to the query, such as adding __typename fields.\n public transformDocument(document: DocumentNode): DocumentNode {\n return document;\n }\n\n // Called before each ApolloLink request, allowing the cache to make dynamic\n // changes to the query, such as filling in missing fragment definitions.\n public transformForLink(document: DocumentNode): DocumentNode {\n return document;\n }\n\n public identify(object: StoreObject | Reference): string | undefined {\n return;\n }\n\n public gc(): string[] {\n return [];\n }\n\n public modify(options: Cache.ModifyOptions): boolean {\n return false;\n }\n\n // DataProxy API\n /**\n *\n * @param options\n * @param optimistic\n */\n public readQuery(\n options: Cache.ReadQueryOptions,\n optimistic = !!options.optimistic,\n ): QueryType | null {\n return this.read({\n ...options,\n rootId: options.id || 'ROOT_QUERY',\n optimistic,\n });\n }\n\n // Make sure we compute the same (===) fragment query document every\n // time we receive the same fragment in readFragment.\n private getFragmentDoc = wrap(getFragmentQueryDocument);\n\n public readFragment(\n options: Cache.ReadFragmentOptions,\n optimistic = !!options.optimistic,\n ): FragmentType | null {\n return this.read({\n ...options,\n query: this.getFragmentDoc(options.fragment, options.fragmentName),\n rootId: options.id,\n optimistic,\n });\n }\n\n public writeQuery({\n id,\n data,\n ...options\n }: Cache.WriteQueryOptions): Reference | undefined {\n return this.write(Object.assign(options, {\n dataId: id || 'ROOT_QUERY',\n result: data,\n }));\n }\n\n public writeFragment({\n id,\n data,\n fragment,\n fragmentName,\n ...options\n }: Cache.WriteFragmentOptions): Reference | undefined {\n return this.write(Object.assign(options, {\n query: this.getFragmentDoc(fragment, fragmentName),\n dataId: id,\n result: data,\n }));\n }\n\n public updateQuery(\n options: Cache.UpdateQueryOptions,\n update: (data: TData | null) => TData | null | void,\n ): TData | null {\n return this.batch({\n update(cache) {\n const value = cache.readQuery(options);\n const data = update(value);\n if (data === void 0 || data === null) return value;\n cache.writeQuery({ ...options, data });\n return data;\n },\n });\n }\n\n public updateFragment(\n options: Cache.UpdateFragmentOptions,\n update: (data: TData | null) => TData | null | void,\n ): TData | null {\n return this.batch({\n update(cache) {\n const value = cache.readFragment(options);\n const data = update(value);\n if (data === void 0 || data === null) return value;\n cache.writeFragment({ ...options, data });\n return data;\n },\n });\n }\n}\n","import { DocumentNode, FieldNode } from 'graphql';\n\nimport {\n Reference,\n StoreObject,\n StoreValue,\n isReference,\n} from '../../../utilities';\n\nimport { StorageType } from '../../inmemory/policies';\n\n// The Readonly type only really works for object types, since it marks\n// all of the object's properties as readonly, but there are many cases when\n// a generic type parameter like TExisting might be a string or some other\n// primitive type, in which case 
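// --- Editorial sketch (not part of the bundled source): updateQuery and
// --- updateFragment above are read-modify-write helpers built on batch;
// --- returning null or undefined from the updater leaves the cache untouched.
// --- Here `cache` stands for an ApolloCache instance (e.g. client.cache), and
// --- TODOS_QUERY plus the Todo entity are assumptions for illustration.
cache.updateQuery({ query: TODOS_QUERY }, data =>
  data && { ...data, todos: data.todos.filter((t: any) => !t.completed) }
);

cache.updateFragment(
  { id: 'Todo:5', fragment: gql`fragment Done on Todo { completed }` },
  data => data && { ...data, completed: true }
);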
we need to avoid wrapping it with Readonly.\n// SafeReadonly collapses to just string, which makes string\n// assignable to SafeReadonly, whereas string is not assignable to\n// Readonly, somewhat surprisingly.\nexport type SafeReadonly = T extends object ? Readonly : T;\n\nexport type MissingTree = string | {\n readonly [key: string]: MissingTree;\n};\n\nexport class MissingFieldError extends Error {\n constructor(\n public readonly message: string,\n public readonly path: MissingTree | Array,\n public readonly query: DocumentNode,\n public readonly variables?: Record,\n ) {\n // 'Error' breaks prototype chain here\n super(message);\n\n if (Array.isArray(this.path)) {\n this.missing = this.message;\n for (let i = this.path.length - 1; i >= 0; --i) {\n this.missing = { [this.path[i]]: this.missing };\n }\n } else {\n this.missing = this.path;\n }\n\n // We're not using `Object.setPrototypeOf` here as it isn't fully supported\n // on Android (see issue #3236).\n (this as any).__proto__ = MissingFieldError.prototype;\n }\n\n public readonly missing: MissingTree;\n}\n\nexport interface FieldSpecifier {\n typename?: string;\n fieldName: string;\n field?: FieldNode;\n args?: Record;\n variables?: Record;\n}\n\nexport interface ReadFieldOptions extends FieldSpecifier {\n from?: StoreObject | Reference;\n}\n\nexport interface ReadFieldFunction {\n (options: ReadFieldOptions): SafeReadonly | undefined;\n (\n fieldName: string,\n from?: StoreObject | Reference,\n ): SafeReadonly | undefined;\n}\n\nexport type ToReferenceFunction = (\n objOrIdOrRef: StoreObject | string | Reference,\n mergeIntoStore?: boolean,\n) => Reference | undefined;\n\nexport type CanReadFunction = (value: StoreValue) => boolean;\n\nexport type Modifier = (value: T, details: {\n DELETE: any;\n INVALIDATE: any;\n fieldName: string;\n storeFieldName: string;\n readField: ReadFieldFunction;\n canRead: CanReadFunction;\n isReference: typeof isReference;\n toReference: ToReferenceFunction;\n storage: StorageType;\n}) => T;\n\nexport type Modifiers = {\n [fieldName: string]: Modifier;\n};\n","import { invariant } from '../../utilities/globals';\nimport { dep, OptimisticDependencyFunction } from 'optimism';\nimport { equal } from '@wry/equality';\nimport { Trie } from '@wry/trie';\n\nimport {\n isReference,\n StoreValue,\n StoreObject,\n Reference,\n makeReference,\n DeepMerger,\n maybeDeepFreeze,\n canUseWeakMap,\n isNonNullObject,\n} from '../../utilities';\nimport { NormalizedCache, NormalizedCacheObject } from './types';\nimport { hasOwn, fieldNameFromStoreName } from './helpers';\nimport { Policies, StorageType } from './policies';\nimport { Cache } from '../core/types/Cache';\nimport {\n SafeReadonly,\n Modifier,\n Modifiers,\n ReadFieldOptions,\n ToReferenceFunction,\n CanReadFunction,\n} from '../core/types/common';\n\nconst DELETE: any = Object.create(null);\nconst delModifier: Modifier = () => DELETE;\nconst INVALIDATE: any = Object.create(null);\n\nexport abstract class EntityStore implements NormalizedCache {\n protected data: NormalizedCacheObject = Object.create(null);\n\n constructor(\n public readonly policies: Policies,\n public readonly group: CacheGroup,\n ) {}\n\n public abstract addLayer(\n layerId: string,\n replay: (layer: EntityStore) => any,\n ): Layer;\n\n public abstract removeLayer(layerId: string): EntityStore;\n\n // Although the EntityStore class is abstract, it contains concrete\n // implementations of the various NormalizedCache interface methods that\n // are inherited by the Root and Layer 
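// --- Editorial sketch (not part of the bundled source): a Modifiers map as
// --- typed above, passed to cache.modify. The Post entity, deletedCommentId,
// --- and the cache instance are assumptions for illustration.
cache.modify({
  id: cache.identify({ __typename: 'Post', id: 42 }),
  fields: {
    comments(existingRefs = [], { readField }) {
      // Drop one reference by reading a field off each Reference.
      return existingRefs.filter(
        (ref: any) => readField('id', ref) !== deletedCommentId,
      );
    },
    votes(_value: any, { DELETE }) {
      return DELETE; // remove the field entirely, like delModifier above
    },
    score(value: any, { INVALIDATE }) {
      return INVALIDATE; // keep the value but dirty watchers that read it
    },
  },
});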
subclasses.\n\n public toObject(): NormalizedCacheObject {\n return { ...this.data };\n }\n\n public has(dataId: string): boolean {\n return this.lookup(dataId, true) !== void 0;\n }\n\n public get(dataId: string, fieldName: string): StoreValue {\n this.group.depend(dataId, fieldName);\n if (hasOwn.call(this.data, dataId)) {\n const storeObject = this.data[dataId];\n if (storeObject && hasOwn.call(storeObject, fieldName)) {\n return storeObject[fieldName];\n }\n }\n if (fieldName === \"__typename\" &&\n hasOwn.call(this.policies.rootTypenamesById, dataId)) {\n return this.policies.rootTypenamesById[dataId];\n }\n if (this instanceof Layer) {\n return this.parent.get(dataId, fieldName);\n }\n }\n\n protected lookup(dataId: string, dependOnExistence?: boolean): StoreObject | undefined {\n // The has method (above) calls lookup with dependOnExistence = true, so\n // that it can later be invalidated when we add or remove a StoreObject for\n // this dataId. Any consumer who cares about the contents of the StoreObject\n // should not rely on this dependency, since the contents could change\n // without the object being added or removed.\n if (dependOnExistence) this.group.depend(dataId, \"__exists\");\n\n if (hasOwn.call(this.data, dataId)) {\n return this.data[dataId];\n }\n\n if (this instanceof Layer) {\n return this.parent.lookup(dataId, dependOnExistence);\n }\n\n if (this.policies.rootTypenamesById[dataId]) {\n return Object.create(null);\n }\n }\n\n public merge(\n older: string | StoreObject,\n newer: StoreObject | string,\n ): void {\n let dataId: string | undefined;\n\n // Convert unexpected references to ID strings.\n if (isReference(older)) older = older.__ref;\n if (isReference(newer)) newer = newer.__ref;\n\n const existing: StoreObject | undefined =\n typeof older === \"string\"\n ? this.lookup(dataId = older)\n : older;\n\n const incoming: StoreObject | undefined =\n typeof newer === \"string\"\n ? 
this.lookup(dataId = newer)\n : newer;\n\n // If newer was a string ID, but that ID was not defined in this store,\n // then there are no fields to be merged, so we're done.\n if (!incoming) return;\n\n invariant(\n typeof dataId === \"string\",\n \"store.merge expects a string ID\",\n );\n\n const merged: StoreObject =\n new DeepMerger(storeObjectReconciler).merge(existing, incoming);\n\n // Even if merged === existing, existing may have come from a lower\n // layer, so we always need to set this.data[dataId] on this level.\n this.data[dataId] = merged;\n\n if (merged !== existing) {\n delete this.refs[dataId];\n if (this.group.caching) {\n const fieldsToDirty: Record = Object.create(null);\n\n // If we added a new StoreObject where there was previously none, dirty\n // anything that depended on the existence of this dataId, such as the\n // EntityStore#has method.\n if (!existing) fieldsToDirty.__exists = 1;\n\n // Now invalidate dependents who called getFieldValue for any fields\n // that are changing as a result of this merge.\n Object.keys(incoming).forEach(storeFieldName => {\n if (!existing || existing[storeFieldName] !== merged[storeFieldName]) {\n // Always dirty the full storeFieldName, which may include\n // serialized arguments following the fieldName prefix.\n fieldsToDirty[storeFieldName] = 1;\n\n // Also dirty fieldNameFromStoreName(storeFieldName) if it's\n // different from storeFieldName and this field does not have\n // keyArgs configured, because that means the cache can't make\n // any assumptions about how field values with the same field\n // name but different arguments might be interrelated, so it\n // must err on the side of invalidating all field values that\n // share the same short fieldName, regardless of arguments.\n const fieldName = fieldNameFromStoreName(storeFieldName);\n if (fieldName !== storeFieldName &&\n !this.policies.hasKeyArgs(merged.__typename, fieldName)) {\n fieldsToDirty[fieldName] = 1;\n }\n\n // If merged[storeFieldName] has become undefined, and this is the\n // Root layer, actually delete the property from the merged object,\n // which is guaranteed to have been created fresh in this method.\n if (merged[storeFieldName] === void 0 && !(this instanceof Layer)) {\n delete merged[storeFieldName];\n }\n }\n });\n\n if (fieldsToDirty.__typename &&\n !(existing && existing.__typename) &&\n // Since we return default root __typename strings\n // automatically from store.get, we don't need to dirty the\n // ROOT_QUERY.__typename field if merged.__typename is equal\n // to the default string (usually \"Query\").\n this.policies.rootTypenamesById[dataId] === merged.__typename) {\n delete fieldsToDirty.__typename;\n }\n\n Object.keys(fieldsToDirty).forEach(\n fieldName => this.group.dirty(dataId as string, fieldName));\n }\n }\n }\n\n public modify(\n dataId: string,\n fields: Modifier | Modifiers,\n ): boolean {\n const storeObject = this.lookup(dataId);\n\n if (storeObject) {\n const changedFields: Record = Object.create(null);\n let needToMerge = false;\n let allDeleted = true;\n\n const sharedDetails = {\n DELETE,\n INVALIDATE,\n isReference,\n toReference: this.toReference,\n canRead: this.canRead,\n readField: (\n fieldNameOrOptions: string | ReadFieldOptions,\n from?: StoreObject | Reference,\n ) => this.policies.readField(\n typeof fieldNameOrOptions === \"string\" ? 
{\n fieldName: fieldNameOrOptions,\n from: from || makeReference(dataId),\n } : fieldNameOrOptions,\n { store: this },\n ),\n };\n\n Object.keys(storeObject).forEach(storeFieldName => {\n const fieldName = fieldNameFromStoreName(storeFieldName);\n let fieldValue = storeObject[storeFieldName];\n if (fieldValue === void 0) return;\n const modify: Modifier = typeof fields === \"function\"\n ? fields\n : fields[storeFieldName] || fields[fieldName];\n if (modify) {\n let newValue = modify === delModifier ? DELETE :\n modify(maybeDeepFreeze(fieldValue), {\n ...sharedDetails,\n fieldName,\n storeFieldName,\n storage: this.getStorage(dataId, storeFieldName),\n });\n if (newValue === INVALIDATE) {\n this.group.dirty(dataId, storeFieldName);\n } else {\n if (newValue === DELETE) newValue = void 0;\n if (newValue !== fieldValue) {\n changedFields[storeFieldName] = newValue;\n needToMerge = true;\n fieldValue = newValue;\n }\n }\n }\n if (fieldValue !== void 0) {\n allDeleted = false;\n }\n });\n\n if (needToMerge) {\n this.merge(dataId, changedFields);\n\n if (allDeleted) {\n if (this instanceof Layer) {\n this.data[dataId] = void 0;\n } else {\n delete this.data[dataId];\n }\n this.group.dirty(dataId, \"__exists\");\n }\n\n return true;\n }\n }\n\n return false;\n }\n\n // If called with only one argument, removes the entire entity\n // identified by dataId. If called with a fieldName as well, removes all\n // fields of that entity whose names match fieldName according to the\n // fieldNameFromStoreName helper function. If called with a fieldName\n // and variables, removes all fields of that entity whose names match fieldName\n // and whose arguments when cached exactly match the variables passed.\n public delete(\n dataId: string,\n fieldName?: string,\n args?: Record,\n ) {\n const storeObject = this.lookup(dataId);\n if (storeObject) {\n const typename = this.getFieldValue(storeObject, \"__typename\");\n const storeFieldName = fieldName && args\n ? this.policies.getStoreFieldName({ typename, fieldName, args })\n : fieldName;\n return this.modify(dataId, storeFieldName ? 
{\n [storeFieldName]: delModifier,\n } : delModifier);\n }\n return false;\n }\n\n public evict(\n options: Cache.EvictOptions,\n limit: EntityStore,\n ): boolean {\n let evicted = false;\n if (options.id) {\n if (hasOwn.call(this.data, options.id)) {\n evicted = this.delete(options.id, options.fieldName, options.args);\n }\n if (this instanceof Layer && this !== limit) {\n evicted = this.parent.evict(options, limit) || evicted;\n }\n // Always invalidate the field to trigger rereading of watched\n // queries, even if no cache data was modified by the eviction,\n // because queries may depend on computed fields with custom read\n // functions, whose values are not stored in the EntityStore.\n if (options.fieldName || evicted) {\n this.group.dirty(options.id, options.fieldName || \"__exists\");\n }\n }\n return evicted;\n }\n\n public clear(): void {\n this.replace(null);\n }\n\n public extract(): NormalizedCacheObject {\n const obj = this.toObject();\n const extraRootIds: string[] = [];\n this.getRootIdSet().forEach(id => {\n if (!hasOwn.call(this.policies.rootTypenamesById, id)) {\n extraRootIds.push(id);\n }\n });\n if (extraRootIds.length) {\n obj.__META = { extraRootIds: extraRootIds.sort() };\n }\n return obj;\n }\n\n public replace(newData: NormalizedCacheObject | null): void {\n Object.keys(this.data).forEach(dataId => {\n if (!(newData && hasOwn.call(newData, dataId))) {\n this.delete(dataId);\n }\n });\n if (newData) {\n const { __META, ...rest } = newData;\n Object.keys(rest).forEach(dataId => {\n this.merge(dataId, rest[dataId] as StoreObject);\n });\n if (__META) {\n __META.extraRootIds.forEach(this.retain, this);\n }\n }\n }\n\n public abstract getStorage(\n idOrObj: string | StoreObject,\n ...storeFieldNames: (string | number)[]\n ): StorageType;\n\n // Maps root entity IDs to the number of times they have been retained, minus\n // the number of times they have been released. Retained entities keep other\n // entities they reference (even indirectly) from being garbage collected.\n private rootIds: {\n [rootId: string]: number;\n } = Object.create(null);\n\n public retain(rootId: string): number {\n return this.rootIds[rootId] = (this.rootIds[rootId] || 0) + 1;\n }\n\n public release(rootId: string): number {\n if (this.rootIds[rootId] > 0) {\n const count = --this.rootIds[rootId];\n if (!count) delete this.rootIds[rootId];\n return count;\n }\n return 0;\n }\n\n // Return a Set of all the ID strings that have been retained by\n // this layer/root *and* any layers/roots beneath it.\n public getRootIdSet(ids = new Set()) {\n Object.keys(this.rootIds).forEach(ids.add, ids);\n if (this instanceof Layer) {\n this.parent.getRootIdSet(ids);\n } else {\n // Official singleton IDs like ROOT_QUERY and ROOT_MUTATION are\n // always considered roots for garbage collection, regardless of\n // their retainment counts in this.rootIds.\n Object.keys(this.policies.rootTypenamesById).forEach(ids.add, ids);\n }\n return ids;\n }\n\n // The goal of garbage collection is to remove IDs from the Root layer of the\n // store that are no longer reachable starting from any IDs that have been\n // explicitly retained (see retain and release, above). 
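// --- Editorial sketch (not part of the bundled source): the public cache
// --- methods backed by the EntityStore delete, gc, retain, and release logic
// --- above. The entity id is an assumption for illustration.
// Evict one field of one entity; watchers are dirtied even if nothing was stored.
cache.evict({ id: 'Post:42', fieldName: 'comments' });

// Remove every normalized object no longer reachable from a retained root id,
// returning the ids that were deleted.
const removedIds = cache.gc();

// Protect an entity from garbage collection, then allow collection again.
cache.retain('Post:42');
cache.release('Post:42');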
Returns an array of\n // dataId strings that were removed from the store.\n public gc() {\n const ids = this.getRootIdSet();\n const snapshot = this.toObject();\n ids.forEach(id => {\n if (hasOwn.call(snapshot, id)) {\n // Because we are iterating over an ECMAScript Set, the IDs we add here\n // will be visited in later iterations of the forEach loop only if they\n // were not previously contained by the Set.\n Object.keys(this.findChildRefIds(id)).forEach(ids.add, ids);\n // By removing IDs from the snapshot object here, we protect them from\n // getting removed from the root store layer below.\n delete snapshot[id];\n }\n });\n const idsToRemove = Object.keys(snapshot);\n if (idsToRemove.length) {\n let root: EntityStore = this;\n while (root instanceof Layer) root = root.parent;\n idsToRemove.forEach(id => root.delete(id));\n }\n return idsToRemove;\n }\n\n // Lazily tracks { __ref: } strings contained by this.data[dataId].\n private refs: {\n [dataId: string]: Record;\n } = Object.create(null);\n\n public findChildRefIds(dataId: string): Record {\n if (!hasOwn.call(this.refs, dataId)) {\n const found = this.refs[dataId] = Object.create(null);\n const root = this.data[dataId];\n if (!root) return found;\n\n const workSet = new Set>([root]);\n // Within the store, only arrays and objects can contain child entity\n // references, so we can prune the traversal using this predicate:\n workSet.forEach(obj => {\n if (isReference(obj)) {\n found[obj.__ref] = true;\n // In rare cases, a { __ref } Reference object may have other fields.\n // This often indicates a mismerging of References with StoreObjects,\n // but garbage collection should not be fooled by a stray __ref\n // property in a StoreObject (ignoring all the other fields just\n // because the StoreObject looks like a Reference). To avoid this\n // premature termination of findChildRefIds recursion, we fall through\n // to the code below, which will handle any other properties of obj.\n }\n if (isNonNullObject(obj)) {\n Object.keys(obj).forEach(key => {\n const child = obj[key];\n // No need to add primitive values to the workSet, since they cannot\n // contain reference objects.\n if (isNonNullObject(child)) {\n workSet.add(child);\n }\n });\n }\n });\n }\n return this.refs[dataId];\n }\n\n // Used to compute cache keys specific to this.group.\n public makeCacheKey(...args: any[]): object;\n public makeCacheKey() {\n return this.group.keyMaker.lookupArray(arguments);\n }\n\n // Bound function that can be passed around to provide easy access to fields\n // of Reference objects as well as ordinary objects.\n public getFieldValue = (\n objectOrReference: StoreObject | Reference | undefined,\n storeFieldName: string,\n ) => maybeDeepFreeze(\n isReference(objectOrReference)\n ? this.get(objectOrReference.__ref, storeFieldName)\n : objectOrReference && objectOrReference[storeFieldName]\n ) as SafeReadonly;\n\n // Returns true for non-normalized StoreObjects and non-dangling\n // References, indicating that readField(name, objOrRef) has a chance of\n // working. Useful for filtering out dangling references from lists.\n public canRead: CanReadFunction = objOrRef => {\n return isReference(objOrRef)\n ? this.has(objOrRef.__ref)\n : typeof objOrRef === \"object\";\n };\n\n // Bound function that converts an id or an object with a __typename and\n // primary key fields to a Reference object. If called with a Reference object,\n // that same Reference object is returned. 
Pass true for mergeIntoStore to persist\n // an object into the store.\n public toReference: ToReferenceFunction = (\n objOrIdOrRef,\n mergeIntoStore,\n ) => {\n if (typeof objOrIdOrRef === \"string\") {\n return makeReference(objOrIdOrRef);\n }\n\n if (isReference(objOrIdOrRef)) {\n return objOrIdOrRef;\n }\n\n const [id] = this.policies.identify(objOrIdOrRef);\n\n if (id) {\n const ref = makeReference(id);\n if (mergeIntoStore) {\n this.merge(id, objOrIdOrRef);\n }\n return ref;\n }\n };\n}\n\nexport type FieldValueGetter = EntityStore[\"getFieldValue\"];\n\n// A single CacheGroup represents a set of one or more EntityStore objects,\n// typically the Root store in a CacheGroup by itself, and all active Layer\n// stores in a group together. A single EntityStore object belongs to only\n// one CacheGroup, store.group. The CacheGroup is responsible for tracking\n// dependencies, so store.group is helpful for generating unique keys for\n// cached results that need to be invalidated when/if those dependencies\n// change. If we used the EntityStore objects themselves as cache keys (that\n// is, store rather than store.group), the cache would become unnecessarily\n// fragmented by all the different Layer objects. Instead, the CacheGroup\n// approach allows all optimistic Layer objects in the same linked list to\n// belong to one CacheGroup, with the non-optimistic Root object belonging\n// to another CacheGroup, allowing resultCaching dependencies to be tracked\n// separately for optimistic and non-optimistic entity data.\nclass CacheGroup {\n private d: OptimisticDependencyFunction | null = null;\n\n // Used by the EntityStore#makeCacheKey method to compute cache keys\n // specific to this CacheGroup.\n public keyMaker: Trie;\n\n constructor(\n public readonly caching: boolean,\n private parent: CacheGroup | null = null,\n ) {\n this.resetCaching();\n }\n\n public resetCaching() {\n this.d = this.caching ? dep() : null;\n this.keyMaker = new Trie(canUseWeakMap);\n }\n\n public depend(dataId: string, storeFieldName: string) {\n if (this.d) {\n this.d(makeDepKey(dataId, storeFieldName));\n const fieldName = fieldNameFromStoreName(storeFieldName);\n if (fieldName !== storeFieldName) {\n // Fields with arguments that contribute extra identifying\n // information to the fieldName (thus forming the storeFieldName)\n // depend not only on the full storeFieldName but also on the\n // short fieldName, so the field can be invalidated using either\n // level of specificity.\n this.d(makeDepKey(dataId, fieldName));\n }\n if (this.parent) {\n this.parent.depend(dataId, storeFieldName);\n }\n }\n }\n\n public dirty(dataId: string, storeFieldName: string) {\n if (this.d) {\n this.d.dirty(\n makeDepKey(dataId, storeFieldName),\n // When storeFieldName === \"__exists\", that means the entity identified\n // by dataId has either disappeared from the cache or was newly added,\n // so the result caching system would do well to \"forget everything it\n // knows\" about that object. To achieve that kind of invalidation, we\n // not only dirty the associated result cache entry, but also remove it\n // completely from the dependency graph. For the optimism implementation\n // details, see https://github.com/benjamn/optimism/pull/195.\n storeFieldName === \"__exists\" ? 
\"forget\" : \"setDirty\",\n );\n }\n }\n}\n\nfunction makeDepKey(dataId: string, storeFieldName: string) {\n // Since field names cannot have '#' characters in them, this method\n // of joining the field name and the ID should be unambiguous, and much\n // cheaper than JSON.stringify([dataId, fieldName]).\n return storeFieldName + '#' + dataId;\n}\n\nexport function maybeDependOnExistenceOfEntity(\n store: NormalizedCache,\n entityId: string,\n) {\n if (supportsResultCaching(store)) {\n // We use this pseudo-field __exists elsewhere in the EntityStore code to\n // represent changes in the existence of the entity object identified by\n // entityId. This dependency gets reliably dirtied whenever an object with\n // this ID is deleted (or newly created) within this group, so any result\n // cache entries (for example, StoreReader#executeSelectionSet results) that\n // depend on __exists for this entityId will get dirtied as well, leading to\n // the eventual recomputation (instead of reuse) of those result objects the\n // next time someone reads them from the cache.\n store.group.depend(entityId, \"__exists\");\n }\n}\n\nexport namespace EntityStore {\n // Refer to this class as EntityStore.Root outside this namespace.\n export class Root extends EntityStore {\n constructor({\n policies,\n resultCaching = true,\n seed,\n }: {\n policies: Policies;\n resultCaching?: boolean;\n seed?: NormalizedCacheObject;\n }) {\n super(policies, new CacheGroup(resultCaching));\n if (seed) this.replace(seed);\n }\n\n public readonly stump = new Stump(this);\n\n public addLayer(\n layerId: string,\n replay: (layer: EntityStore) => any,\n ): Layer {\n // Adding an optimistic Layer on top of the Root actually adds the Layer\n // on top of the Stump, so the Stump always comes between the Root and\n // any Layer objects that we've added.\n return this.stump.addLayer(layerId, replay);\n }\n\n public removeLayer(): Root {\n // Never remove the root layer.\n return this;\n }\n\n public readonly storageTrie = new Trie(canUseWeakMap);\n public getStorage(): StorageType {\n return this.storageTrie.lookupArray(arguments);\n }\n }\n}\n\n// Not exported, since all Layer instances are created by the addLayer method\n// of the EntityStore.Root class.\nclass Layer extends EntityStore {\n constructor(\n public readonly id: string,\n public readonly parent: EntityStore,\n public readonly replay: (layer: EntityStore) => any,\n public readonly group: CacheGroup,\n ) {\n super(parent.policies, group);\n replay(this);\n }\n\n public addLayer(\n layerId: string,\n replay: (layer: EntityStore) => any,\n ): Layer {\n return new Layer(layerId, this, replay, this.group);\n }\n\n public removeLayer(layerId: string): EntityStore {\n // Remove all instances of the given id, not just the first one.\n const parent = this.parent.removeLayer(layerId);\n\n if (layerId === this.id) {\n if (this.group.caching) {\n // Dirty every ID we're removing. Technically we might be able to avoid\n // dirtying fields that have values in higher layers, but we don't have\n // easy access to higher layers here, and we're about to recreate those\n // layers anyway (see parent.addLayer below).\n Object.keys(this.data).forEach(dataId => {\n const ownStoreObject = this.data[dataId];\n const parentStoreObject = parent[\"lookup\"](dataId);\n if (!parentStoreObject) {\n // The StoreObject identified by dataId was defined in this layer\n // but will be undefined in the parent layer, so we can delete the\n // whole entity using this.delete(dataId). 
Since we're about to\n // throw this layer away, the only goal of this deletion is to dirty\n // the removed fields.\n this.delete(dataId);\n } else if (!ownStoreObject) {\n // This layer had an entry for dataId but it was undefined, which\n // means the entity was deleted in this layer, and it's about to\n // become undeleted when we remove this layer, so we need to dirty\n // all fields that are about to be reexposed.\n this.group.dirty(dataId, \"__exists\");\n Object.keys(parentStoreObject).forEach(storeFieldName => {\n this.group.dirty(dataId, storeFieldName);\n });\n } else if (ownStoreObject !== parentStoreObject) {\n // If ownStoreObject is not exactly the same as parentStoreObject,\n // dirty any fields whose values will change as a result of this\n // removal.\n Object.keys(ownStoreObject).forEach(storeFieldName => {\n if (!equal(ownStoreObject[storeFieldName],\n parentStoreObject[storeFieldName])) {\n this.group.dirty(dataId, storeFieldName);\n }\n });\n }\n });\n }\n\n return parent;\n }\n\n // No changes are necessary if the parent chain remains identical.\n if (parent === this.parent) return this;\n\n // Recreate this layer on top of the new parent.\n return parent.addLayer(this.id, this.replay);\n }\n\n public toObject(): NormalizedCacheObject {\n return {\n ...this.parent.toObject(),\n ...this.data,\n };\n }\n\n public findChildRefIds(dataId: string): Record {\n const fromParent = this.parent.findChildRefIds(dataId);\n return hasOwn.call(this.data, dataId) ? {\n ...fromParent,\n ...super.findChildRefIds(dataId),\n } : fromParent;\n }\n\n public getStorage(): StorageType {\n let p: EntityStore = this.parent;\n while ((p as Layer).parent) p = (p as Layer).parent;\n return p.getStorage.apply(p, arguments);\n }\n}\n\n// Represents a Layer permanently installed just above the Root, which allows\n// reading optimistically (and registering optimistic dependencies) even when\n// no optimistic layers are currently active. The stump.group CacheGroup object\n// is shared by any/all Layer objects added on top of the Stump.\nclass Stump extends Layer {\n constructor(root: EntityStore.Root) {\n super(\n \"EntityStore.Stump\",\n root,\n () => {},\n new CacheGroup(root.group.caching, root.group),\n );\n }\n\n public removeLayer() {\n // Never remove the Stump layer.\n return this;\n }\n\n public merge() {\n // We never want to write any data into the Stump, so we forward any merge\n // calls to the Root instead. Another option here would be to throw an\n // exception, but the toReference(object, true) function can sometimes\n // trigger Stump writes (which used to be Root writes, before the Stump\n // concept was introduced).\n return this.parent.merge.apply(this.parent, arguments);\n }\n}\n\nfunction storeObjectReconciler(\n existingObject: StoreObject,\n incomingObject: StoreObject,\n property: string,\n): StoreValue {\n const existingValue = existingObject[property];\n const incomingValue = incomingObject[property];\n // Wherever there is a key collision, prefer the incoming value, unless\n // it is deeply equal to the existing value. It's worth checking deep\n // equality here (even though blindly returning incoming would be\n // logically correct) because preserving the referential identity of\n // existing data can prevent needless rereading and rerendering.\n return equal(existingValue, incomingValue) ? 
existingValue : incomingValue;\n}\n\nexport function supportsResultCaching(store: any): store is EntityStore {\n // When result caching is disabled, store.depend will be null.\n return !!(store instanceof EntityStore && store.group.caching);\n}\n","import { invariant, InvariantError } from '../../utilities/globals';\n\nimport {\n DocumentNode,\n FieldNode,\n Kind,\n SelectionSetNode,\n} from 'graphql';\nimport { wrap, OptimisticWrapperFunction } from 'optimism';\n\nimport {\n isField,\n resultKeyNameFromField,\n Reference,\n isReference,\n makeReference,\n StoreObject,\n FragmentMap,\n shouldInclude,\n addTypenameToDocument,\n getDefaultValues,\n getMainDefinition,\n getQueryDefinition,\n getFragmentFromSelection,\n maybeDeepFreeze,\n mergeDeepArray,\n DeepMerger,\n isNonNullObject,\n canUseWeakMap,\n compact,\n FragmentMapFunction,\n} from '../../utilities';\nimport { Cache } from '../core/types/Cache';\nimport {\n DiffQueryAgainstStoreOptions,\n InMemoryCacheConfig,\n NormalizedCache,\n ReadMergeModifyContext,\n} from './types';\nimport { maybeDependOnExistenceOfEntity, supportsResultCaching } from './entityStore';\nimport { isArray, extractFragmentContext, getTypenameFromStoreObject, shouldCanonizeResults } from './helpers';\nimport { Policies } from './policies';\nimport { InMemoryCache } from './inMemoryCache';\nimport { MissingFieldError, MissingTree } from '../core/types/common';\nimport { canonicalStringify, ObjectCanon } from './object-canon';\n\nexport type VariableMap = { [name: string]: any };\n\ninterface ReadContext extends ReadMergeModifyContext {\n query: DocumentNode;\n policies: Policies;\n canonizeResults: boolean;\n fragmentMap: FragmentMap;\n lookupFragment: FragmentMapFunction;\n};\n\nexport type ExecResult = {\n result: R;\n missing?: MissingTree;\n};\n\ntype ExecSelectionSetOptions = {\n selectionSet: SelectionSetNode;\n objectOrReference: StoreObject | Reference;\n enclosingRef: Reference;\n context: ReadContext;\n};\n\ntype ExecSubSelectedArrayOptions = {\n field: FieldNode;\n array: readonly any[];\n enclosingRef: Reference;\n context: ReadContext;\n};\n\nexport interface StoreReaderConfig {\n cache: InMemoryCache,\n addTypename?: boolean;\n resultCacheMaxSize?: number;\n canonizeResults?: boolean;\n canon?: ObjectCanon;\n fragments?: InMemoryCacheConfig[\"fragments\"];\n}\n\n// Arguments type after keyArgs translation.\ntype ExecSelectionSetKeyArgs = [\n SelectionSetNode,\n StoreObject | Reference,\n ReadMergeModifyContext,\n boolean,\n];\n\nfunction execSelectionSetKeyArgs(\n options: ExecSelectionSetOptions,\n): ExecSelectionSetKeyArgs {\n return [\n options.selectionSet,\n options.objectOrReference,\n options.context,\n // We split out this property so we can pass different values\n // independently without modifying options.context itself.\n options.context.canonizeResults,\n ];\n}\n\nexport class StoreReader {\n // cached version of executeSelectionSet\n private executeSelectionSet: OptimisticWrapperFunction<\n [ExecSelectionSetOptions], // Actual arguments tuple type.\n ExecResult, // Actual return type.\n ExecSelectionSetKeyArgs\n >;\n\n // cached version of executeSubSelectedArray\n private executeSubSelectedArray: OptimisticWrapperFunction<\n [ExecSubSelectedArrayOptions],\n ExecResult,\n [ExecSubSelectedArrayOptions]>;\n\n private config: {\n cache: InMemoryCache,\n addTypename: boolean;\n resultCacheMaxSize?: number;\n canonizeResults: boolean;\n fragments?: InMemoryCacheConfig[\"fragments\"];\n };\n\n private knownResults = new (\n canUseWeakMap ? 
WeakMap : Map\n ), SelectionSetNode>();\n\n public canon: ObjectCanon;\n public resetCanon() {\n this.canon = new ObjectCanon;\n }\n\n constructor(config: StoreReaderConfig) {\n this.config = compact(config, {\n addTypename: config.addTypename !== false,\n canonizeResults: shouldCanonizeResults(config),\n });\n\n this.canon = config.canon || new ObjectCanon;\n\n this.executeSelectionSet = wrap(options => {\n const { canonizeResults } = options.context;\n\n const peekArgs = execSelectionSetKeyArgs(options);\n\n // Negate this boolean option so we can find out if we've already read\n // this result using the other boolean value.\n peekArgs[3] = !canonizeResults;\n\n const other = this.executeSelectionSet.peek(...peekArgs);\n\n if (other) {\n if (canonizeResults) {\n return {\n ...other,\n // If we previously read this result without canonizing it, we can\n // reuse that result simply by canonizing it now.\n result: this.canon.admit(other.result),\n };\n }\n // If we previously read this result with canonization enabled, we can\n // return that canonized result as-is.\n return other;\n }\n\n maybeDependOnExistenceOfEntity(\n options.context.store,\n options.enclosingRef.__ref,\n );\n\n // Finally, if we didn't find any useful previous results, run the real\n // execSelectionSetImpl method with the given options.\n return this.execSelectionSetImpl(options);\n\n }, {\n max: this.config.resultCacheMaxSize,\n keyArgs: execSelectionSetKeyArgs,\n // Note that the parameters of makeCacheKey are determined by the\n // array returned by keyArgs.\n makeCacheKey(selectionSet, parent, context, canonizeResults) {\n if (supportsResultCaching(context.store)) {\n return context.store.makeCacheKey(\n selectionSet,\n isReference(parent) ? parent.__ref : parent,\n context.varString,\n canonizeResults,\n );\n }\n }\n });\n\n this.executeSubSelectedArray = wrap((options: ExecSubSelectedArrayOptions) => {\n maybeDependOnExistenceOfEntity(\n options.context.store,\n options.enclosingRef.__ref,\n );\n return this.execSubSelectedArrayImpl(options);\n }, {\n max: this.config.resultCacheMaxSize,\n makeCacheKey({ field, array, context }) {\n if (supportsResultCaching(context.store)) {\n return context.store.makeCacheKey(\n field,\n array,\n context.varString,\n );\n }\n }\n });\n }\n\n /**\n * Given a store and a query, return as much of the result as possible and\n * identify if any data was missing from the store.\n * @param {DocumentNode} query A parsed GraphQL query document\n * @param {Store} store The Apollo Client store object\n * @return {result: Object, complete: [boolean]}\n */\n public diffQueryAgainstStore({\n store,\n query,\n rootId = 'ROOT_QUERY',\n variables,\n returnPartialData = true,\n canonizeResults = this.config.canonizeResults,\n }: DiffQueryAgainstStoreOptions): Cache.DiffResult {\n const policies = this.config.cache.policies;\n\n variables = {\n ...getDefaultValues(getQueryDefinition(query)),\n ...variables!,\n };\n\n const rootRef = makeReference(rootId);\n const execResult = this.executeSelectionSet({\n selectionSet: getMainDefinition(query).selectionSet,\n objectOrReference: rootRef,\n enclosingRef: rootRef,\n context: {\n store,\n query,\n policies,\n variables,\n varString: canonicalStringify(variables),\n canonizeResults,\n ...extractFragmentContext(query, this.config.fragments),\n },\n });\n\n let missing: MissingFieldError[] | undefined;\n if (execResult.missing) {\n // For backwards compatibility we still report an array of\n // MissingFieldError objects, even though there will only 
ever be at most\n // one of them, now that all missing field error messages are grouped\n // together in the execResult.missing tree.\n missing = [new MissingFieldError(\n firstMissing(execResult.missing)!,\n execResult.missing,\n query,\n variables,\n )];\n if (!returnPartialData) {\n throw missing[0];\n }\n }\n\n return {\n result: execResult.result,\n complete: !missing,\n missing,\n };\n }\n\n public isFresh(\n result: Record,\n parent: StoreObject | Reference,\n selectionSet: SelectionSetNode,\n context: ReadMergeModifyContext,\n ): boolean {\n if (supportsResultCaching(context.store) &&\n this.knownResults.get(result) === selectionSet) {\n const latest = this.executeSelectionSet.peek(\n selectionSet,\n parent,\n context,\n // If result is canonical, then it could only have been previously\n // cached by the canonizing version of executeSelectionSet, so we can\n // avoid checking both possibilities here.\n this.canon.isKnown(result),\n );\n if (latest && result === latest.result) {\n return true;\n }\n }\n return false;\n }\n\n // Uncached version of executeSelectionSet.\n private execSelectionSetImpl({\n selectionSet,\n objectOrReference,\n enclosingRef,\n context,\n }: ExecSelectionSetOptions): ExecResult {\n if (isReference(objectOrReference) &&\n !context.policies.rootTypenamesById[objectOrReference.__ref] &&\n !context.store.has(objectOrReference.__ref)) {\n return {\n result: this.canon.empty,\n missing: `Dangling reference to missing ${objectOrReference.__ref} object`,\n };\n }\n\n const { variables, policies, store } = context;\n const typename = store.getFieldValue(objectOrReference, \"__typename\");\n\n const objectsToMerge: Record[] = [];\n let missing: MissingTree | undefined;\n const missingMerger = new DeepMerger();\n\n if (this.config.addTypename &&\n typeof typename === \"string\" &&\n !policies.rootIdsByTypename[typename]) {\n // Ensure we always include a default value for the __typename\n // field, if we have one, and this.config.addTypename is true. Note\n // that this field can be overridden by other merged objects.\n objectsToMerge.push({ __typename: typename });\n }\n\n function handleMissing(result: ExecResult, resultName: string): T {\n if (result.missing) {\n missing = missingMerger.merge(missing, { [resultName]: result.missing });\n }\n return result.result;\n }\n\n const workSet = new Set(selectionSet.selections);\n\n workSet.forEach(selection => {\n // Omit fields with directives @skip(if: ) or\n // @include(if: ).\n if (!shouldInclude(selection, variables)) return;\n\n if (isField(selection)) {\n let fieldValue = policies.readField({\n fieldName: selection.name.value,\n field: selection,\n variables: context.variables,\n from: objectOrReference,\n }, context);\n\n const resultName = resultKeyNameFromField(selection);\n\n if (fieldValue === void 0) {\n if (!addTypenameToDocument.added(selection)) {\n missing = missingMerger.merge(missing, {\n [resultName]: `Can't find field '${\n selection.name.value\n }' on ${\n isReference(objectOrReference)\n ? objectOrReference.__ref + \" object\"\n : \"object \" + JSON.stringify(objectOrReference, null, 2)\n }`\n });\n }\n\n } else if (isArray(fieldValue)) {\n fieldValue = handleMissing(this.executeSubSelectedArray({\n field: selection,\n array: fieldValue,\n enclosingRef,\n context,\n }), resultName);\n\n } else if (!selection.selectionSet) {\n // If the field does not have a selection set, then we handle it\n // as a scalar value. 
To keep this.canon from canonicalizing\n // this value, we use this.canon.pass to wrap fieldValue in a\n // Pass object that this.canon.admit will later unwrap as-is.\n if (context.canonizeResults) {\n fieldValue = this.canon.pass(fieldValue);\n }\n\n } else if (fieldValue != null) {\n // In this case, because we know the field has a selection set,\n // it must be trying to query a GraphQLObjectType, which is why\n // fieldValue must be != null.\n fieldValue = handleMissing(this.executeSelectionSet({\n selectionSet: selection.selectionSet,\n objectOrReference: fieldValue as StoreObject | Reference,\n enclosingRef: isReference(fieldValue) ? fieldValue : enclosingRef,\n context,\n }), resultName);\n }\n\n if (fieldValue !== void 0) {\n objectsToMerge.push({ [resultName]: fieldValue });\n }\n\n } else {\n const fragment = getFragmentFromSelection(\n selection,\n context.lookupFragment,\n );\n\n if (!fragment && selection.kind === Kind.FRAGMENT_SPREAD) {\n throw new InvariantError(`No fragment named ${selection.name.value}`);\n }\n\n if (fragment && policies.fragmentMatches(fragment, typename)) {\n fragment.selectionSet.selections.forEach(workSet.add, workSet);\n }\n }\n });\n\n const result = mergeDeepArray(objectsToMerge);\n const finalResult: ExecResult = { result, missing };\n const frozen = context.canonizeResults\n ? this.canon.admit(finalResult)\n // Since this.canon is normally responsible for freezing results (only in\n // development), freeze them manually if canonization is disabled.\n : maybeDeepFreeze(finalResult);\n\n // Store this result with its selection set so that we can quickly\n // recognize it again in the StoreReader#isFresh method.\n if (frozen.result) {\n this.knownResults.set(frozen.result, selectionSet);\n }\n\n return frozen;\n }\n\n // Uncached version of executeSubSelectedArray.\n private execSubSelectedArrayImpl({\n field,\n array,\n enclosingRef,\n context,\n }: ExecSubSelectedArrayOptions): ExecResult {\n let missing: MissingTree | undefined;\n let missingMerger = new DeepMerger();\n\n function handleMissing(childResult: ExecResult, i: number): T {\n if (childResult.missing) {\n missing = missingMerger.merge(missing, { [i]: childResult.missing });\n }\n return childResult.result;\n }\n\n if (field.selectionSet) {\n array = array.filter(context.store.canRead);\n }\n\n array = array.map((item, i) => {\n // null value in array\n if (item === null) {\n return null;\n }\n\n // This is a nested array, recurse\n if (isArray(item)) {\n return handleMissing(this.executeSubSelectedArray({\n field,\n array: item,\n enclosingRef,\n context,\n }), i);\n }\n\n // This is an object, run the selection set on it\n if (field.selectionSet) {\n return handleMissing(this.executeSelectionSet({\n selectionSet: field.selectionSet,\n objectOrReference: item,\n enclosingRef: isReference(item) ? item : enclosingRef,\n context,\n }), i);\n }\n\n if (__DEV__) {\n assertSelectionSetForIdValue(context.store, field, item);\n }\n\n return item;\n });\n\n return {\n result: context.canonizeResults ? 
this.canon.admit(array) : array,\n missing,\n };\n }\n}\n\nfunction firstMissing(tree: MissingTree): string | undefined {\n try {\n JSON.stringify(tree, (_, value) => {\n if (typeof value === \"string\") throw value;\n return value;\n });\n } catch (result) {\n return result;\n }\n}\n\nfunction assertSelectionSetForIdValue(\n store: NormalizedCache,\n field: FieldNode,\n fieldValue: any,\n) {\n if (!field.selectionSet) {\n const workSet = new Set([fieldValue]);\n workSet.forEach(value => {\n if (isNonNullObject(value)) {\n invariant(\n !isReference(value),\n `Missing selection set for object of type ${\n getTypenameFromStoreObject(store, value)\n } returned for query field ${field.name.value}`,\n );\n Object.values(value).forEach(workSet.add, workSet);\n }\n });\n }\n}\n","import { invariant } from \"../../utilities/globals\";\n\nimport {\n argumentsObjectFromField,\n DeepMerger,\n isNonEmptyArray,\n isNonNullObject,\n} from \"../../utilities\";\n\nimport { hasOwn, isArray } from \"./helpers\";\nimport {\n KeySpecifier,\n KeyFieldsFunction,\n KeyArgsFunction,\n} from \"./policies\";\n\n// Mapping from JSON-encoded KeySpecifier strings to associated information.\nconst specifierInfoCache: Record = Object.create(null);\n\nfunction lookupSpecifierInfo(spec: KeySpecifier) {\n // It's safe to encode KeySpecifier arrays with JSON.stringify, since they're\n // just arrays of strings or nested KeySpecifier arrays, and the order of the\n // array elements is important (and suitably preserved by JSON.stringify).\n const cacheKey = JSON.stringify(spec);\n return specifierInfoCache[cacheKey] ||\n (specifierInfoCache[cacheKey] = Object.create(null));\n}\n\nexport function keyFieldsFnFromSpecifier(\n specifier: KeySpecifier,\n): KeyFieldsFunction {\n const info = lookupSpecifierInfo(specifier);\n\n return info.keyFieldsFn || (info.keyFieldsFn = (\n object,\n context,\n ) => {\n const extract: typeof extractKey =\n (from, key) => context.readField(key, from);\n\n const keyObject = context.keyObject = collectSpecifierPaths(\n specifier,\n schemaKeyPath => {\n let extracted = extractKeyPath(\n context.storeObject,\n schemaKeyPath,\n // Using context.readField to extract paths from context.storeObject\n // allows the extraction to see through Reference objects and respect\n // custom read functions.\n extract,\n );\n\n if (\n extracted === void 0 &&\n object !== context.storeObject &&\n hasOwn.call(object, schemaKeyPath[0])\n ) {\n // If context.storeObject fails to provide a value for the requested\n // path, fall back to the raw result object, if it has a top-level key\n // matching the first key in the path (schemaKeyPath[0]). 
This allows\n // key fields included in the written data to be saved in the cache\n // even if they are not selected explicitly in context.selectionSet.\n // Not being mentioned by context.selectionSet is convenient here,\n // since it means these extra fields cannot be affected by field\n // aliasing, which is why we can use extractKey instead of\n // context.readField for this extraction.\n extracted = extractKeyPath(object, schemaKeyPath, extractKey);\n }\n\n invariant(\n extracted !== void 0,\n `Missing field '${schemaKeyPath.join('.')}' while extracting keyFields from ${\n JSON.stringify(object)\n }`,\n );\n\n return extracted;\n },\n );\n\n return `${context.typename}:${JSON.stringify(keyObject)}`;\n });\n}\n\n// The keyArgs extraction process is roughly analogous to keyFields extraction,\n// but there are no aliases involved, missing fields are tolerated (by merely\n// omitting them from the key), and drawing from field.directives or variables\n// is allowed (in addition to drawing from the field's arguments object).\n// Concretely, these differences mean passing a different key path extractor\n// function to collectSpecifierPaths, reusing the shared extractKeyPath helper\n// wherever possible.\nexport function keyArgsFnFromSpecifier(specifier: KeySpecifier): KeyArgsFunction {\n const info = lookupSpecifierInfo(specifier);\n\n return info.keyArgsFn || (info.keyArgsFn = (args, {\n field,\n variables,\n fieldName,\n }) => {\n const collected = collectSpecifierPaths(specifier, keyPath => {\n const firstKey = keyPath[0];\n const firstChar = firstKey.charAt(0);\n\n if (firstChar === \"@\") {\n if (field && isNonEmptyArray(field.directives)) {\n const directiveName = firstKey.slice(1);\n // If the directive appears multiple times, only the first\n // occurrence's arguments will be used. TODO Allow repetition?\n // TODO Cache this work somehow, a la aliasMap?\n const d = field.directives.find(d => d.name.value === directiveName);\n // Fortunately argumentsObjectFromField works for DirectiveNode!\n const directiveArgs = d && argumentsObjectFromField(d, variables);\n // For directives without arguments (d defined, but directiveArgs ===\n // null), the presence or absence of the directive still counts as\n // part of the field key, so we return null in those cases. 
If no\n // directive with this name was found for this field (d undefined and\n // thus directiveArgs undefined), we return undefined, which causes\n // this value to be omitted from the key object returned by\n // collectSpecifierPaths.\n return directiveArgs && extractKeyPath(\n directiveArgs,\n // If keyPath.length === 1, this code calls extractKeyPath with an\n // empty path, which works because it uses directiveArgs as the\n // extracted value.\n keyPath.slice(1),\n );\n }\n // If the key started with @ but there was no corresponding directive,\n // we want to omit this value from the key object, not fall through to\n // treating @whatever as a normal argument name.\n return;\n }\n\n if (firstChar === \"$\") {\n const variableName = firstKey.slice(1);\n if (variables && hasOwn.call(variables, variableName)) {\n const varKeyPath = keyPath.slice(0);\n varKeyPath[0] = variableName;\n return extractKeyPath(variables, varKeyPath);\n }\n // If the key started with $ but there was no corresponding variable, we\n // want to omit this value from the key object, not fall through to\n // treating $whatever as a normal argument name.\n return;\n }\n\n if (args) {\n return extractKeyPath(args, keyPath);\n }\n });\n\n const suffix = JSON.stringify(collected);\n\n // If no arguments were passed to this field, and it didn't have any other\n // field key contributions from directives or variables, hide the empty\n // :{} suffix from the field key. However, a field passed no arguments can\n // still end up with a non-empty :{...} suffix if its key configuration\n // refers to directives or variables.\n if (args || suffix !== \"{}\") {\n fieldName += \":\" + suffix;\n }\n\n return fieldName;\n });\n}\n\nexport function collectSpecifierPaths(\n specifier: KeySpecifier,\n extractor: (path: string[]) => any,\n): Record {\n // For each path specified by specifier, invoke the extractor, and repeatedly\n // merge the results together, with appropriate ancestor context.\n const merger = new DeepMerger;\n return getSpecifierPaths(specifier).reduce((collected, path) => {\n let toMerge = extractor(path);\n if (toMerge !== void 0) {\n // This path is not expected to contain array indexes, so the toMerge\n // reconstruction will not contain arrays. TODO Fix this?\n for (let i = path.length - 1; i >= 0; --i) {\n toMerge = { [path[i]]: toMerge };\n }\n collected = merger.merge(collected, toMerge);\n }\n return collected;\n }, Object.create(null));\n}\n\nexport function getSpecifierPaths(spec: KeySpecifier): string[][] {\n const info = lookupSpecifierInfo(spec);\n\n if (!info.paths) {\n const paths: string[][] = info.paths = [];\n const currentPath: string[] = [];\n\n spec.forEach((s, i) => {\n if (isArray(s)) {\n getSpecifierPaths(s).forEach(p => paths.push(currentPath.concat(p)));\n currentPath.length = 0;\n } else {\n currentPath.push(s);\n if (!isArray(spec[i + 1])) {\n paths.push(currentPath.slice(0));\n currentPath.length = 0;\n }\n }\n });\n }\n\n return info.paths!;\n}\n\nfunction extractKey<\n TObj extends Record,\n TKey extends string,\n>(object: TObj, key: TKey): TObj[TKey] | undefined {\n return object[key];\n}\n\nexport function extractKeyPath(\n object: Record,\n path: string[],\n extract?: typeof extractKey,\n): any {\n // For each key in path, extract the corresponding child property from obj,\n // flattening arrays if encountered (uncommon for keyFields and keyArgs, but\n // possible). The final result of path.reduce is normalized so unexpected leaf\n // objects have their keys safely sorted. 
That final result is difficult to\n // type as anything other than any. You're welcome to try to improve the\n // return type, but keep in mind extractKeyPath is not a public function\n // (exported only for testing), so the effort may not be worthwhile unless the\n // limited set of actual callers (see above) pass arguments that TypeScript\n // can statically type. If we know only that path is some array of strings\n // (and not, say, a specific tuple of statically known strings), any (or\n // possibly unknown) is the honest answer.\n extract = extract || extractKey;\n return normalize(path.reduce(function reducer(obj, key): any {\n return isArray(obj)\n ? obj.map(child => reducer(child, key))\n : obj && extract!(obj, key);\n }, object));\n}\n\nfunction normalize(value: T): T {\n // Usually the extracted value will be a scalar value, since most primary\n // key fields are scalar, but just in case we get an object or an array, we\n // need to do some normalization of the order of (nested) keys.\n if (isNonNullObject(value)) {\n if (isArray(value)) {\n return value.map(normalize) as any;\n }\n return collectSpecifierPaths(\n Object.keys(value).sort(),\n path => extractKeyPath(value, path),\n ) as T;\n }\n return value;\n}\n","import { invariant, InvariantError } from '../../utilities/globals';\n\nimport {\n InlineFragmentNode,\n FragmentDefinitionNode,\n SelectionSetNode,\n FieldNode,\n} from 'graphql';\n\nimport {\n FragmentMap,\n storeKeyNameFromField,\n StoreValue,\n StoreObject,\n argumentsObjectFromField,\n Reference,\n isReference,\n getStoreKeyName,\n isNonNullObject,\n stringifyForDisplay,\n} from '../../utilities';\nimport {\n IdGetter,\n MergeInfo,\n NormalizedCache,\n ReadMergeModifyContext,\n} from \"./types\";\nimport {\n hasOwn,\n fieldNameFromStoreName,\n storeValueIsStoreObject,\n selectionSetMatchesResult,\n TypeOrFieldNameRegExp,\n defaultDataIdFromObject,\n isArray,\n} from './helpers';\nimport { cacheSlot } from './reactiveVars';\nimport { InMemoryCache } from './inMemoryCache';\nimport {\n SafeReadonly,\n FieldSpecifier,\n ToReferenceFunction,\n ReadFieldFunction,\n ReadFieldOptions,\n CanReadFunction,\n} from '../core/types/common';\nimport { WriteContext } from './writeToStore';\n\n// Upgrade to a faster version of the default stable JSON.stringify function\n// used by getStoreKeyName. This function is used when computing storeFieldName\n// strings (when no keyArgs has been configured for a field).\nimport { canonicalStringify } from './object-canon';\nimport { keyArgsFnFromSpecifier, keyFieldsFnFromSpecifier } from './key-extractor';\n\ngetStoreKeyName.setStringify(canonicalStringify);\n\nexport type TypePolicies = {\n [__typename: string]: TypePolicy;\n}\n\n// TypeScript 3.7 will allow recursive type aliases, so this should work:\n// type KeySpecifier = (string | KeySpecifier)[]\nexport type KeySpecifier = ReadonlyArray;\n\nexport type KeyFieldsContext = {\n // The __typename of the incoming object, even if the __typename field was\n // aliased to another name in the raw result object. 
May be undefined when\n // dataIdFromObject is called for objects without __typename fields.\n typename: string | undefined;\n\n // The object to be identified, after processing to remove aliases and\n // normalize identifiable child objects with references.\n storeObject: StoreObject;\n\n // Handy tool for reading additional fields from context.storeObject, either\n // readField(\"fieldName\") to read storeObject[fieldName], or readField(\"name\",\n // objectOrReference) to read from another object or Reference. If you read a\n // field with a read function, that function will be invoked.\n readField: ReadFieldFunction;\n\n // If you are writing a custom keyFields function, and you plan to use the raw\n // result object passed as the first argument, you may also need access to the\n // selection set and available fragments for this object, just in case any\n // fields have aliases. Since this logic is tricky to get right, and these\n // context properties are not even always provided (for example, they are\n // omitted when calling cache.identify(object), where object is assumed to be\n // a StoreObject), we recommend you use context.storeObject (which has already\n // been de-aliased) and context.readField (which can read from references as\n // well as objects) instead of the raw result object in your keyFields\n // functions, or just rely on the internal implementation of keyFields:[...]\n // syntax to get these details right for you.\n selectionSet?: SelectionSetNode;\n fragmentMap?: FragmentMap;\n\n // Internal. May be set by the KeyFieldsFunction to report fields that were\n // involved in computing the ID. Never passed in by the caller.\n keyObject?: Record;\n};\n\nexport type KeyFieldsFunction = (\n object: Readonly,\n context: KeyFieldsContext,\n) => KeySpecifier | false | ReturnType;\n\ntype KeyFieldsResult = Exclude, KeySpecifier>;\n\n// TODO Should TypePolicy be a generic type, with a TObject or TEntity\n// type parameter?\nexport type TypePolicy = {\n // Allows defining the primary key fields for this type, either using an\n // array of field names or a function that returns an arbitrary string.\n keyFields?: KeySpecifier | KeyFieldsFunction | false;\n\n // Allows defining a merge function (or merge:true/false shorthand) to\n // be used for merging objects of this type wherever they appear, unless\n // the parent field also defines a merge function/boolean (that is,\n // parent field merge functions take precedence over type policy merge\n // functions). In many cases, defining merge:true for a given type\n // policy can save you from specifying merge:true for all the field\n // policies where that type might be encountered.\n merge?: FieldMergeFunction | boolean;\n\n // In the rare event that your schema happens to use a different\n // __typename for the root Query, Mutation, and/or Schema types, you can\n // express your deviant preferences by enabling one of these options.\n queryType?: true,\n mutationType?: true,\n subscriptionType?: true,\n\n fields?: {\n [fieldName: string]:\n | FieldPolicy\n | FieldReadFunction;\n }\n};\n\nexport type KeyArgsFunction = (\n args: Record | null,\n context: {\n typename: string;\n fieldName: string;\n field: FieldNode | null;\n variables?: Record;\n },\n) => KeySpecifier | false | ReturnType;\n\nexport type FieldPolicy<\n // The internal representation used to store the field's data in the\n // cache. 
Must be JSON-serializable if you plan to serialize the result\n // of cache.extract() using JSON.\n TExisting = any,\n // The type of the incoming parameter passed to the merge function,\n // typically matching the GraphQL response format, but with Reference\n // objects substituted for any identifiable child objects. Often the\n // same as TExisting, but not necessarily.\n TIncoming = TExisting,\n // The type that the read function actually returns, using TExisting\n // data and options.args as input. Usually the same as TIncoming.\n TReadResult = TIncoming,\n // Allows FieldFunctionOptions definition to be overwritten by the\n // developer\n TOptions extends FieldFunctionOptions = FieldFunctionOptions\n> = {\n keyArgs?: KeySpecifier | KeyArgsFunction | false;\n read?: FieldReadFunction;\n merge?: FieldMergeFunction | boolean;\n};\n\nexport type StorageType = Record;\n\nfunction argsFromFieldSpecifier(spec: FieldSpecifier) {\n return spec.args !== void 0 ? spec.args :\n spec.field ? argumentsObjectFromField(spec.field, spec.variables) : null;\n}\n\nexport interface FieldFunctionOptions<\n TArgs = Record,\n TVars = Record,\n> {\n args: TArgs | null;\n\n // The name of the field, equal to options.field.name.value when\n // options.field is available. Useful if you reuse the same function for\n // multiple fields, and you need to know which field you're currently\n // processing. Always a string, even when options.field is null.\n fieldName: string;\n\n // The full field key used internally, including serialized key arguments.\n storeFieldName: string;\n\n // The FieldNode object used to read this field. Useful if you need to\n // know about other attributes of the field, such as its directives. This\n // option will be null when a string was passed to options.readField.\n field: FieldNode | null;\n\n variables?: TVars;\n\n // Utilities for dealing with { __ref } objects.\n isReference: typeof isReference;\n toReference: ToReferenceFunction;\n\n // A handy place to put field-specific data that you want to survive\n // across multiple read function calls. Useful for field-level caching,\n // if your read function does any expensive work.\n storage: StorageType;\n\n cache: InMemoryCache;\n\n // Helper function for reading other fields within the current object.\n // If a foreign object or reference is provided, the field will be read\n // from that object instead of the current object, so this function can\n // be used (together with isReference) to examine the cache outside the\n // current object. If a FieldNode is passed instead of a string, and\n // that FieldNode has arguments, the same options.variables will be used\n // to compute the argument values. Note that this function will invoke\n // custom read functions for other fields, if defined. Always returns\n // immutable data (enforced with Object.freeze in development).\n readField: ReadFieldFunction;\n\n // Returns true for non-normalized StoreObjects and non-dangling\n // References, indicating that readField(name, objOrRef) has a chance of\n // working. 
Useful for filtering out dangling references from lists.\n canRead: CanReadFunction;\n\n // Instead of just merging objects with { ...existing, ...incoming }, this\n // helper function can be used to merge objects in a way that respects any\n // custom merge functions defined for their fields.\n mergeObjects: MergeObjectsFunction;\n}\n\ntype MergeObjectsFunction = (\n existing: T,\n incoming: T,\n) => T;\n\nexport type FieldReadFunction<\n TExisting = any,\n TReadResult = TExisting,\n TOptions extends FieldFunctionOptions = FieldFunctionOptions\n> = (\n // When reading a field, one often needs to know about any existing\n // value stored for that field. If the field is read before any value\n // has been written to the cache, this existing parameter will be\n // undefined, which makes it easy to use a default parameter expression\n // to supply the initial value. This parameter is positional (rather\n // than one of the named options) because that makes it possible for the\n // developer to annotate it with a type, without also having to provide\n // a whole new type for the options object.\n existing: SafeReadonly | undefined,\n options: TOptions,\n) => TReadResult | undefined;\n\nexport type FieldMergeFunction<\n TExisting = any,\n TIncoming = TExisting,\n // Passing the whole FieldFunctionOptions makes the current definition\n // independent from its implementation\n TOptions extends FieldFunctionOptions = FieldFunctionOptions\n> = (\n existing: SafeReadonly | undefined,\n // The incoming parameter needs to be positional as well, for the same\n // reasons discussed in FieldReadFunction above.\n incoming: SafeReadonly,\n options: TOptions,\n) => SafeReadonly;\n\nconst nullKeyFieldsFn: KeyFieldsFunction = () => void 0;\nconst simpleKeyArgsFn: KeyArgsFunction = (_args, context) => context.fieldName;\n\n// These merge functions can be selected by specifying merge:true or\n// merge:false in a field policy.\nconst mergeTrueFn: FieldMergeFunction =\n (existing, incoming, { mergeObjects }) => mergeObjects(existing, incoming);\nconst mergeFalseFn: FieldMergeFunction = (_, incoming) => incoming;\n\nexport type PossibleTypesMap = {\n [supertype: string]: string[];\n};\n\nexport class Policies {\n private typePolicies: {\n [__typename: string]: {\n keyFn?: KeyFieldsFunction;\n merge?: FieldMergeFunction;\n fields: {\n [fieldName: string]: {\n keyFn?: KeyArgsFunction;\n read?: FieldReadFunction;\n merge?: FieldMergeFunction;\n };\n };\n };\n } = Object.create(null);\n\n private toBeAdded: {\n [__typename: string]: TypePolicy[];\n } = Object.create(null);\n\n // Map from subtype names to sets of supertype names. Note that this\n // representation inverts the structure of possibleTypes (whose keys are\n // supertypes and whose values are arrays of subtypes) because it tends\n // to be much more efficient to search upwards than downwards.\n private supertypeMap = new Map>();\n\n // Any fuzzy subtypes specified by possibleTypes will be converted to\n // RegExp objects and recorded here. Every key of this map can also be\n // found in supertypeMap. 
In many cases this Map will be empty, which\n // means no fuzzy subtype checking will happen in fragmentMatches.\n private fuzzySubtypes = new Map();\n\n public readonly cache: InMemoryCache;\n\n public readonly rootIdsByTypename: Record = Object.create(null);\n public readonly rootTypenamesById: Record = Object.create(null);\n\n public readonly usingPossibleTypes = false;\n\n constructor(private config: {\n cache: InMemoryCache;\n dataIdFromObject?: KeyFieldsFunction;\n possibleTypes?: PossibleTypesMap;\n typePolicies?: TypePolicies;\n }) {\n this.config = {\n dataIdFromObject: defaultDataIdFromObject,\n ...config,\n };\n\n this.cache = this.config.cache;\n\n this.setRootTypename(\"Query\");\n this.setRootTypename(\"Mutation\");\n this.setRootTypename(\"Subscription\");\n\n if (config.possibleTypes) {\n this.addPossibleTypes(config.possibleTypes);\n }\n\n if (config.typePolicies) {\n this.addTypePolicies(config.typePolicies);\n }\n }\n\n public identify(\n object: StoreObject,\n partialContext?: Partial,\n ): [string?, StoreObject?] {\n const policies = this;\n\n const typename = partialContext && (\n partialContext.typename ||\n partialContext.storeObject?.__typename\n ) || object.__typename;\n\n // It should be possible to write root Query fields with writeFragment,\n // using { __typename: \"Query\", ... } as the data, but it does not make\n // sense to allow the same identification behavior for the Mutation and\n // Subscription types, since application code should never be writing\n // directly to (or reading directly from) those root objects.\n if (typename === this.rootTypenamesById.ROOT_QUERY) {\n return [\"ROOT_QUERY\"];\n }\n\n // Default context.storeObject to object if not otherwise provided.\n const storeObject = partialContext && partialContext.storeObject || object;\n\n const context: KeyFieldsContext = {\n ...partialContext,\n typename,\n storeObject,\n readField: partialContext && partialContext.readField || function () {\n const options = normalizeReadFieldOptions(arguments, storeObject);\n return policies.readField(options, {\n store: policies.cache[\"data\"],\n variables: options.variables,\n });\n },\n };\n\n let id: KeyFieldsResult;\n\n const policy = typename && this.getTypePolicy(typename);\n let keyFn = policy && policy.keyFn || this.config.dataIdFromObject;\n while (keyFn) {\n const specifierOrId = keyFn(object, context);\n if (isArray(specifierOrId)) {\n keyFn = keyFieldsFnFromSpecifier(specifierOrId);\n } else {\n id = specifierOrId;\n break;\n }\n }\n\n id = id ? String(id) : void 0;\n return context.keyObject ? [id, context.keyObject] : [id];\n }\n\n public addTypePolicies(typePolicies: TypePolicies) {\n Object.keys(typePolicies).forEach(typename => {\n const {\n queryType,\n mutationType,\n subscriptionType,\n ...incoming\n } = typePolicies[typename];\n\n // Though {query,mutation,subscription}Type configurations are rare,\n // it's important to call setRootTypename as early as possible,\n // since these configurations should apply consistently for the\n // entire lifetime of the cache. Also, since only one __typename can\n // qualify as one of these root types, these three properties cannot\n // be inherited, unlike the rest of the incoming properties. 
That\n // restriction is convenient, because the purpose of this.toBeAdded\n // is to delay the processing of type/field policies until the first\n // time they're used, allowing policies to be added in any order as\n // long as all relevant policies (including policies for supertypes)\n // have been added by the time a given policy is used for the first\n // time. In other words, since inheritance doesn't matter for these\n // properties, there's also no need to delay their processing using\n // the this.toBeAdded queue.\n if (queryType) this.setRootTypename(\"Query\", typename);\n if (mutationType) this.setRootTypename(\"Mutation\", typename);\n if (subscriptionType) this.setRootTypename(\"Subscription\", typename);\n\n if (hasOwn.call(this.toBeAdded, typename)) {\n this.toBeAdded[typename].push(incoming);\n } else {\n this.toBeAdded[typename] = [incoming];\n }\n });\n }\n\n private updateTypePolicy(typename: string, incoming: TypePolicy) {\n const existing = this.getTypePolicy(typename);\n const { keyFields, fields } = incoming;\n\n function setMerge(\n existing: { merge?: FieldMergeFunction | boolean; },\n merge?: FieldMergeFunction | boolean,\n ) {\n existing.merge =\n typeof merge === \"function\" ? merge :\n // Pass merge:true as a shorthand for a merge implementation\n // that returns options.mergeObjects(existing, incoming).\n merge === true ? mergeTrueFn :\n // Pass merge:false to make incoming always replace existing\n // without any warnings about data clobbering.\n merge === false ? mergeFalseFn :\n existing.merge;\n }\n\n // Type policies can define merge functions, as an alternative to\n // using field policies to merge child objects.\n setMerge(existing, incoming.merge);\n\n existing.keyFn =\n // Pass false to disable normalization for this typename.\n keyFields === false ? nullKeyFieldsFn :\n // Pass an array of strings to use those fields to compute a\n // composite ID for objects of this typename.\n isArray(keyFields) ? keyFieldsFnFromSpecifier(keyFields) :\n // Pass a function to take full control over identification.\n typeof keyFields === \"function\" ? keyFields :\n // Leave existing.keyFn unchanged if above cases fail.\n existing.keyFn;\n\n if (fields) {\n Object.keys(fields).forEach(fieldName => {\n const existing = this.getFieldPolicy(typename, fieldName, true)!;\n const incoming = fields[fieldName];\n\n if (typeof incoming === \"function\") {\n existing.read = incoming;\n } else {\n const { keyArgs, read, merge } = incoming;\n\n existing.keyFn =\n // Pass false to disable argument-based differentiation of\n // field identities.\n keyArgs === false ? simpleKeyArgsFn :\n // Pass an array of strings to use named arguments to\n // compute a composite identity for the field.\n isArray(keyArgs) ? keyArgsFnFromSpecifier(keyArgs) :\n // Pass a function to take full control over field identity.\n typeof keyArgs === \"function\" ? keyArgs :\n // Leave existing.keyFn unchanged if above cases fail.\n existing.keyFn;\n\n if (typeof read === \"function\") {\n existing.read = read;\n }\n\n setMerge(existing, merge);\n }\n\n if (existing.read && existing.merge) {\n // If we have both a read and a merge function, assume\n // keyArgs:false, because read and merge together can take\n // responsibility for interpreting arguments in and out. 
This\n // default assumption can always be overridden by specifying\n // keyArgs explicitly in the FieldPolicy.\n existing.keyFn = existing.keyFn || simpleKeyArgsFn;\n }\n });\n }\n }\n\n private setRootTypename(\n which: \"Query\" | \"Mutation\" | \"Subscription\",\n typename: string = which,\n ) {\n const rootId = \"ROOT_\" + which.toUpperCase();\n const old = this.rootTypenamesById[rootId];\n if (typename !== old) {\n invariant(!old || old === which, `Cannot change root ${which} __typename more than once`);\n // First, delete any old __typename associated with this rootId from\n // rootIdsByTypename.\n if (old) delete this.rootIdsByTypename[old];\n // Now make this the only __typename that maps to this rootId.\n this.rootIdsByTypename[typename] = rootId;\n // Finally, update the __typename associated with this rootId.\n this.rootTypenamesById[rootId] = typename;\n }\n }\n\n public addPossibleTypes(possibleTypes: PossibleTypesMap) {\n (this.usingPossibleTypes as boolean) = true;\n Object.keys(possibleTypes).forEach(supertype => {\n // Make sure all types have an entry in this.supertypeMap, even if\n // their supertype set is empty, so we can return false immediately\n // from policies.fragmentMatches for unknown supertypes.\n this.getSupertypeSet(supertype, true);\n\n possibleTypes[supertype].forEach(subtype => {\n this.getSupertypeSet(subtype, true)!.add(supertype);\n const match = subtype.match(TypeOrFieldNameRegExp);\n if (!match || match[0] !== subtype) {\n // TODO Don't interpret just any invalid typename as a RegExp.\n this.fuzzySubtypes.set(subtype, new RegExp(subtype));\n }\n });\n });\n }\n\n private getTypePolicy(typename: string): Policies[\"typePolicies\"][string] {\n if (!hasOwn.call(this.typePolicies, typename)) {\n const policy: Policies[\"typePolicies\"][string] =\n this.typePolicies[typename] = Object.create(null);\n policy.fields = Object.create(null);\n\n // When the TypePolicy for typename is first accessed, instead of\n // starting with an empty policy object, inherit any properties or\n // fields from the type policies of the supertypes of typename.\n //\n // Any properties or fields defined explicitly within the TypePolicy\n // for typename will take precedence, and if there are multiple\n // supertypes, the properties of policies whose types were added\n // later via addPossibleTypes will take precedence over those of\n // earlier supertypes. 
TODO Perhaps we should warn about these\n // conflicts in development, and recommend defining the property\n // explicitly in the subtype policy?\n //\n // Field policy inheritance is atomic/shallow: you can't inherit a\n // field policy and then override just its read function, since read\n // and merge functions often need to cooperate, so changing only one\n // of them would be a recipe for inconsistency.\n //\n // Once the TypePolicy for typename has been accessed, its\n // properties can still be updated directly using addTypePolicies,\n // but future changes to supertype policies will not be reflected in\n // this policy, because this code runs at most once per typename.\n const supertypes = this.supertypeMap.get(typename);\n if (supertypes && supertypes.size) {\n supertypes.forEach(supertype => {\n const { fields, ...rest } = this.getTypePolicy(supertype);\n Object.assign(policy, rest);\n Object.assign(policy.fields, fields);\n });\n }\n }\n\n const inbox = this.toBeAdded[typename];\n if (inbox && inbox.length) {\n // Merge the pending policies into this.typePolicies, in the order they\n // were originally passed to addTypePolicy.\n inbox.splice(0).forEach(policy => {\n this.updateTypePolicy(typename, policy);\n });\n }\n\n return this.typePolicies[typename];\n }\n\n private getFieldPolicy(\n typename: string | undefined,\n fieldName: string,\n createIfMissing: boolean,\n ): {\n keyFn?: KeyArgsFunction;\n read?: FieldReadFunction;\n merge?: FieldMergeFunction;\n } | undefined {\n if (typename) {\n const fieldPolicies = this.getTypePolicy(typename).fields;\n return fieldPolicies[fieldName] || (\n createIfMissing && (fieldPolicies[fieldName] = Object.create(null)));\n }\n }\n\n private getSupertypeSet(\n subtype: string,\n createIfMissing: boolean,\n ): Set | undefined {\n let supertypeSet = this.supertypeMap.get(subtype);\n if (!supertypeSet && createIfMissing) {\n this.supertypeMap.set(subtype, supertypeSet = new Set());\n }\n return supertypeSet;\n }\n\n public fragmentMatches(\n fragment: InlineFragmentNode | FragmentDefinitionNode,\n typename: string | undefined,\n result?: Record,\n variables?: Record,\n ): boolean {\n if (!fragment.typeCondition) return true;\n\n // If the fragment has a type condition but the object we're matching\n // against does not have a __typename, the fragment cannot match.\n if (!typename) return false;\n\n const supertype = fragment.typeCondition.name.value;\n // Common case: fragment type condition and __typename are the same.\n if (typename === supertype) return true;\n\n if (this.usingPossibleTypes &&\n this.supertypeMap.has(supertype)) {\n const typenameSupertypeSet = this.getSupertypeSet(typename, true)!;\n const workQueue = [typenameSupertypeSet];\n const maybeEnqueue = (subtype: string) => {\n const supertypeSet = this.getSupertypeSet(subtype, false);\n if (supertypeSet &&\n supertypeSet.size &&\n workQueue.indexOf(supertypeSet) < 0) {\n workQueue.push(supertypeSet);\n }\n };\n\n // We need to check fuzzy subtypes only if we encountered fuzzy\n // subtype strings in addPossibleTypes, and only while writing to\n // the cache, since that's when selectionSetMatchesResult gives a\n // strong signal of fragment matching. 
The StoreReader class calls\n // policies.fragmentMatches without passing a result object, so\n // needToCheckFuzzySubtypes is always false while reading.\n let needToCheckFuzzySubtypes = !!(result && this.fuzzySubtypes.size);\n let checkingFuzzySubtypes = false;\n\n // It's important to keep evaluating workQueue.length each time through\n // the loop, because the queue can grow while we're iterating over it.\n for (let i = 0; i < workQueue.length; ++i) {\n const supertypeSet = workQueue[i];\n\n if (supertypeSet.has(supertype)) {\n if (!typenameSupertypeSet.has(supertype)) {\n if (checkingFuzzySubtypes) {\n invariant.warn(`Inferring subtype ${typename} of supertype ${supertype}`);\n }\n // Record positive results for faster future lookup.\n // Unfortunately, we cannot safely cache negative results,\n // because new possibleTypes data could always be added to the\n // Policies class.\n typenameSupertypeSet.add(supertype);\n }\n return true;\n }\n\n supertypeSet.forEach(maybeEnqueue);\n\n if (needToCheckFuzzySubtypes &&\n // Start checking fuzzy subtypes only after exhausting all\n // non-fuzzy subtypes (after the final iteration of the loop).\n i === workQueue.length - 1 &&\n // We could wait to compare fragment.selectionSet to result\n // after we verify the supertype, but this check is often less\n // expensive than that search, and we will have to do the\n // comparison anyway whenever we find a potential match.\n selectionSetMatchesResult(fragment.selectionSet, result!, variables)) {\n // We don't always need to check fuzzy subtypes (if no result\n // was provided, or !this.fuzzySubtypes.size), but, when we do,\n // we only want to check them once.\n needToCheckFuzzySubtypes = false;\n checkingFuzzySubtypes = true;\n\n // If we find any fuzzy subtypes that match typename, extend the\n // workQueue to search through the supertypes of those fuzzy\n // subtypes. Otherwise the for-loop will terminate and we'll\n // return false below.\n this.fuzzySubtypes.forEach((regExp, fuzzyString) => {\n const match = typename.match(regExp);\n if (match && match[0] === typename) {\n maybeEnqueue(fuzzyString);\n }\n });\n }\n }\n }\n\n return false;\n }\n\n public hasKeyArgs(typename: string | undefined, fieldName: string) {\n const policy = this.getFieldPolicy(typename, fieldName, false);\n return !!(policy && policy.keyFn);\n }\n\n public getStoreFieldName(fieldSpec: FieldSpecifier): string {\n const { typename, fieldName } = fieldSpec;\n const policy = this.getFieldPolicy(typename, fieldName, false);\n let storeFieldName: Exclude, KeySpecifier>;\n\n let keyFn = policy && policy.keyFn;\n if (keyFn && typename) {\n const context: Parameters[1] = {\n typename,\n fieldName,\n field: fieldSpec.field || null,\n variables: fieldSpec.variables,\n };\n const args = argsFromFieldSpecifier(fieldSpec);\n while (keyFn) {\n const specifierOrString = keyFn(args, context);\n if (isArray(specifierOrString)) {\n keyFn = keyArgsFnFromSpecifier(specifierOrString);\n } else {\n // If the custom keyFn returns a falsy value, fall back to\n // fieldName instead.\n storeFieldName = specifierOrString || fieldName;\n break;\n }\n }\n }\n\n if (storeFieldName === void 0) {\n storeFieldName = fieldSpec.field\n ? 
storeKeyNameFromField(fieldSpec.field, fieldSpec.variables)\n : getStoreKeyName(fieldName, argsFromFieldSpecifier(fieldSpec));\n }\n\n // Returning false from a keyArgs function is like configuring\n // keyArgs: false, but more dynamic.\n if (storeFieldName === false) {\n return fieldName;\n }\n\n // Make sure custom field names start with the actual field.name.value\n // of the field, so we can always figure out which properties of a\n // StoreObject correspond to which original field names.\n return fieldName === fieldNameFromStoreName(storeFieldName)\n ? storeFieldName\n : fieldName + \":\" + storeFieldName;\n }\n\n public readField(\n options: ReadFieldOptions,\n context: ReadMergeModifyContext,\n ): SafeReadonly | undefined {\n const objectOrReference = options.from;\n if (!objectOrReference) return;\n\n const nameOrField = options.field || options.fieldName;\n if (!nameOrField) return;\n\n if (options.typename === void 0) {\n const typename = context.store.getFieldValue(objectOrReference, \"__typename\");\n if (typename) options.typename = typename;\n }\n\n const storeFieldName = this.getStoreFieldName(options);\n const fieldName = fieldNameFromStoreName(storeFieldName);\n const existing = context.store.getFieldValue(objectOrReference, storeFieldName);\n const policy = this.getFieldPolicy(options.typename, fieldName, false);\n const read = policy && policy.read;\n\n if (read) {\n const readOptions = makeFieldFunctionOptions(\n this,\n objectOrReference,\n options,\n context,\n context.store.getStorage(\n isReference(objectOrReference)\n ? objectOrReference.__ref\n : objectOrReference,\n storeFieldName,\n ),\n );\n\n // Call read(existing, readOptions) with cacheSlot holding this.cache.\n return cacheSlot.withValue(\n this.cache,\n read,\n [existing, readOptions],\n ) as SafeReadonly;\n }\n\n return existing;\n }\n\n public getReadFunction(\n typename: string | undefined,\n fieldName: string,\n ): FieldReadFunction | undefined {\n const policy = this.getFieldPolicy(typename, fieldName, false);\n return policy && policy.read;\n }\n\n public getMergeFunction(\n parentTypename: string | undefined,\n fieldName: string,\n childTypename: string | undefined,\n ): FieldMergeFunction | undefined {\n let policy:\n | Policies[\"typePolicies\"][string]\n | Policies[\"typePolicies\"][string][\"fields\"][string]\n | undefined =\n this.getFieldPolicy(parentTypename, fieldName, false);\n let merge = policy && policy.merge;\n if (!merge && childTypename) {\n policy = this.getTypePolicy(childTypename);\n merge = policy && policy.merge;\n }\n return merge;\n }\n\n public runMergeFunction(\n existing: StoreValue,\n incoming: StoreValue,\n { field, typename, merge }: MergeInfo,\n context: WriteContext,\n storage?: StorageType,\n ) {\n if (merge === mergeTrueFn) {\n // Instead of going to the trouble of creating a full\n // FieldFunctionOptions object and calling mergeTrueFn, we can\n // simply call mergeObjects, as mergeTrueFn would.\n return makeMergeObjectsFunction(\n context.store,\n )(existing as StoreObject,\n incoming as StoreObject);\n }\n\n if (merge === mergeFalseFn) {\n // Likewise for mergeFalseFn, whose implementation is even simpler.\n return incoming;\n }\n\n // If cache.writeQuery or cache.writeFragment was called with\n // options.overwrite set to true, we still call merge functions, but\n // the existing data is always undefined, so the merge function will\n // not attempt to combine the incoming data with the existing data.\n if (context.overwrite) {\n existing = void 0;\n }\n\n return 
merge(existing, incoming, makeFieldFunctionOptions(\n this,\n // Unlike options.readField for read functions, we do not fall\n // back to the current object if no foreignObjOrRef is provided,\n // because it's not clear what the current object should be for\n // merge functions: the (possibly undefined) existing object, or\n // the incoming object? If you think your merge function needs\n // to read sibling fields in order to produce a new value for\n // the current field, you might want to rethink your strategy,\n // because that's a recipe for making merge behavior sensitive\n // to the order in which fields are written into the cache.\n // However, readField(name, ref) is useful for merge functions\n // that need to deduplicate child objects and references.\n void 0,\n { typename,\n fieldName: field.name.value,\n field,\n variables: context.variables },\n context,\n storage || Object.create(null),\n ));\n }\n}\n\nfunction makeFieldFunctionOptions(\n policies: Policies,\n objectOrReference: StoreObject | Reference | undefined,\n fieldSpec: FieldSpecifier,\n context: ReadMergeModifyContext,\n storage: StorageType,\n): FieldFunctionOptions {\n const storeFieldName = policies.getStoreFieldName(fieldSpec);\n const fieldName = fieldNameFromStoreName(storeFieldName);\n const variables = fieldSpec.variables || context.variables;\n const { toReference, canRead } = context.store;\n\n return {\n args: argsFromFieldSpecifier(fieldSpec),\n field: fieldSpec.field || null,\n fieldName,\n storeFieldName,\n variables,\n isReference,\n toReference,\n storage,\n cache: policies.cache,\n canRead,\n readField() {\n return policies.readField(\n normalizeReadFieldOptions(arguments, objectOrReference, variables),\n context,\n );\n },\n mergeObjects: makeMergeObjectsFunction(context.store),\n };\n}\n\nexport function normalizeReadFieldOptions(\n readFieldArgs: IArguments,\n objectOrReference: StoreObject | Reference | undefined,\n variables?: ReadMergeModifyContext[\"variables\"],\n): ReadFieldOptions {\n const {\n 0: fieldNameOrOptions,\n 1: from,\n length: argc,\n } = readFieldArgs;\n\n let options: ReadFieldOptions;\n\n if (typeof fieldNameOrOptions === \"string\") {\n options = {\n fieldName: fieldNameOrOptions,\n // Default to objectOrReference only when no second argument was\n // passed for the from parameter, not when undefined is explicitly\n // passed as the second argument.\n from: argc > 1 ? 
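/*
 * Two of the readField calling conventions normalized below, shown in a
 * hypothetical Book.authorName read function (type and field names are
 * illustrative only):
 *
 *   import { InMemoryCache, Reference } from "@apollo/client";
 *
 *   const cache = new InMemoryCache({
 *     typePolicies: {
 *       Book: {
 *         fields: {
 *           authorName: {
 *             read(_, { readField }) {
 *               // No second argument: reads from the current object.
 *               const author = readField<Reference>("author");
 *               // Explicit second argument: reads from another Reference.
 *               return author && readField<string>("name", author);
 *             },
 *           },
 *         },
 *       },
 *     },
 *   });
 */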
from : objectOrReference,\n };\n } else {\n options = { ...fieldNameOrOptions };\n // Default to objectOrReference only when fieldNameOrOptions.from is\n // actually omitted, rather than just undefined.\n if (!hasOwn.call(options, \"from\")) {\n options.from = objectOrReference;\n }\n }\n\n if (__DEV__ && options.from === void 0) {\n invariant.warn(`Undefined 'from' passed to readField with arguments ${\n stringifyForDisplay(Array.from(readFieldArgs))\n }`);\n }\n\n if (void 0 === options.variables) {\n options.variables = variables;\n }\n\n return options;\n}\n\nfunction makeMergeObjectsFunction(\n store: NormalizedCache,\n): MergeObjectsFunction {\n return function mergeObjects(existing, incoming) {\n if (isArray(existing) || isArray(incoming)) {\n throw new InvariantError(\"Cannot automatically merge arrays\");\n }\n\n // These dynamic checks are necessary because the parameters of a\n // custom merge function can easily have the any type, so the type\n // system cannot always enforce the StoreObject | Reference parameter\n // types of options.mergeObjects.\n if (isNonNullObject(existing) &&\n isNonNullObject(incoming)) {\n const eType = store.getFieldValue(existing, \"__typename\");\n const iType = store.getFieldValue(incoming, \"__typename\");\n const typesDiffer = eType && iType && eType !== iType;\n\n if (typesDiffer) {\n return incoming;\n }\n\n if (isReference(existing) &&\n storeValueIsStoreObject(incoming)) {\n // Update the normalized EntityStore for the entity identified by\n // existing.__ref, preferring/overwriting any fields contributed by the\n // newer incoming StoreObject.\n store.merge(existing.__ref, incoming);\n return existing;\n }\n\n if (storeValueIsStoreObject(existing) &&\n isReference(incoming)) {\n // Update the normalized EntityStore for the entity identified by\n // incoming.__ref, taking fields from the older existing object only if\n // those fields are not already present in the newer StoreObject\n // identified by incoming.__ref.\n store.merge(existing, incoming.__ref);\n return incoming;\n }\n\n if (storeValueIsStoreObject(existing) &&\n storeValueIsStoreObject(incoming)) {\n return { ...existing, ...incoming };\n }\n }\n\n return incoming;\n };\n}\n","import { makeUniqueId } from \"./makeUniqueId\";\n\nexport function stringifyForDisplay(value: any): string {\n const undefId = makeUniqueId(\"stringifyForDisplay\");\n return JSON.stringify(value, (key, value) => {\n return value === void 0 ? 
undefId : value;\n }).split(JSON.stringify(undefId)).join(\"\");\n}\n","import { invariant, InvariantError } from '../../utilities/globals';\nimport { equal } from '@wry/equality';\nimport { Trie } from '@wry/trie';\nimport {\n SelectionSetNode,\n FieldNode,\n Kind,\n} from 'graphql';\n\nimport {\n FragmentMap,\n FragmentMapFunction,\n getFragmentFromSelection,\n getDefaultValues,\n getOperationDefinition,\n getTypenameFromResult,\n makeReference,\n isField,\n resultKeyNameFromField,\n StoreValue,\n StoreObject,\n Reference,\n isReference,\n shouldInclude,\n cloneDeep,\n addTypenameToDocument,\n isNonEmptyArray,\n argumentsObjectFromField,\n} from '../../utilities';\n\nimport { NormalizedCache, ReadMergeModifyContext, MergeTree, InMemoryCacheConfig } from './types';\nimport { isArray, makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject, extractFragmentContext } from './helpers';\nimport { StoreReader } from './readFromStore';\nimport { InMemoryCache } from './inMemoryCache';\nimport { EntityStore } from './entityStore';\nimport { Cache } from '../../core';\nimport { canonicalStringify } from './object-canon';\nimport { normalizeReadFieldOptions } from './policies';\nimport { ReadFieldFunction } from '../core/types/common';\n\nexport interface WriteContext extends ReadMergeModifyContext {\n readonly written: {\n [dataId: string]: SelectionSetNode[];\n };\n readonly fragmentMap: FragmentMap;\n lookupFragment: FragmentMapFunction;\n // General-purpose deep-merge function for use during writes.\n merge(existing: T, incoming: T): T;\n // If true, merge functions will be called with undefined existing data.\n overwrite: boolean;\n incomingById: Map;\n }>;\n // Directive metadata for @client and @defer. We could use a bitfield for this\n // information to save some space, and use that bitfield number as the keys in\n // the context.flavors Map.\n clientOnly: boolean;\n deferred: boolean;\n flavors: Map;\n};\n\ntype FlavorableWriteContext = Pick<\n WriteContext,\n | \"clientOnly\"\n | \"deferred\"\n | \"flavors\"\n>;\n\n// Since there are only four possible combinations of context.clientOnly and\n// context.deferred values, we should need at most four \"flavors\" of any given\n// WriteContext. To avoid creating multiple copies of the same context, we cache\n// the contexts in the context.flavors Map (shared by all flavors) according to\n// their clientOnly and deferred values (always in that order).\nfunction getContextFlavor(\n context: TContext,\n clientOnly: TContext[\"clientOnly\"],\n deferred: TContext[\"deferred\"],\n): TContext {\n const key = `${clientOnly}${deferred}`;\n let flavored = context.flavors.get(key);\n if (!flavored) {\n context.flavors.set(key, flavored = (\n context.clientOnly === clientOnly &&\n context.deferred === deferred\n ) ? 
context : {\n ...context,\n clientOnly,\n deferred,\n });\n }\n return flavored as TContext;\n}\n\ninterface ProcessSelectionSetOptions {\n dataId?: string,\n result: Record;\n selectionSet: SelectionSetNode;\n context: WriteContext;\n mergeTree: MergeTree;\n}\n\nexport class StoreWriter {\n constructor(\n public readonly cache: InMemoryCache,\n private reader?: StoreReader,\n private fragments?: InMemoryCacheConfig[\"fragments\"],\n ) {}\n\n public writeToStore(store: NormalizedCache, {\n query,\n result,\n dataId,\n variables,\n overwrite,\n }: Cache.WriteOptions): Reference | undefined {\n const operationDefinition = getOperationDefinition(query)!;\n const merger = makeProcessedFieldsMerger();\n\n variables = {\n ...getDefaultValues(operationDefinition),\n ...variables!,\n };\n\n const context: WriteContext = {\n store,\n written: Object.create(null),\n merge(existing: T, incoming: T) {\n return merger.merge(existing, incoming) as T;\n },\n variables,\n varString: canonicalStringify(variables),\n ...extractFragmentContext(query, this.fragments),\n overwrite: !!overwrite,\n incomingById: new Map,\n clientOnly: false,\n deferred: false,\n flavors: new Map,\n };\n\n const ref = this.processSelectionSet({\n result: result || Object.create(null),\n dataId,\n selectionSet: operationDefinition.selectionSet,\n mergeTree: { map: new Map },\n context,\n });\n\n if (!isReference(ref)) {\n throw new InvariantError(`Could not identify object ${JSON.stringify(result)}`);\n }\n\n // So far, the store has not been modified, so now it's time to process\n // context.incomingById and merge those incoming fields into context.store.\n context.incomingById.forEach(({ storeObject, mergeTree, fieldNodeSet }, dataId) => {\n const entityRef = makeReference(dataId);\n\n if (mergeTree && mergeTree.map.size) {\n const applied = this.applyMerges(mergeTree, entityRef, storeObject, context);\n if (isReference(applied)) {\n // Assume References returned by applyMerges have already been merged\n // into the store. See makeMergeObjectsFunction in policies.ts for an\n // example of how this can happen.\n return;\n }\n // Otherwise, applyMerges returned a StoreObject, whose fields we should\n // merge into the store (see store.merge statement below).\n storeObject = applied;\n }\n\n if (__DEV__ && !context.overwrite) {\n const fieldsWithSelectionSets: Record = Object.create(null);\n fieldNodeSet.forEach(field => {\n if (field.selectionSet) {\n fieldsWithSelectionSets[field.name.value] = true;\n }\n });\n\n const hasSelectionSet = (storeFieldName: string) =>\n fieldsWithSelectionSets[\n fieldNameFromStoreName(storeFieldName)\n ] === true;\n\n const hasMergeFunction = (storeFieldName: string) => {\n const childTree = mergeTree && mergeTree.map.get(storeFieldName);\n return Boolean(childTree && childTree.info && childTree.info.merge);\n };\n\n Object.keys(storeObject).forEach(storeFieldName => {\n // If a merge function was defined for this field, trust that it\n // did the right thing about (not) clobbering data. If the field\n // has no selection set, it's a scalar field, so it doesn't need\n // a merge function (even if it's an object, like JSON data).\n if (hasSelectionSet(storeFieldName) &&\n !hasMergeFunction(storeFieldName)) {\n warnAboutDataLoss(\n entityRef,\n storeObject,\n storeFieldName,\n context.store,\n );\n }\n });\n }\n\n store.merge(dataId, storeObject);\n });\n\n // Any IDs written explicitly to the cache will be retained as\n // reachable root IDs for garbage collection purposes. 
Although this\n // logic includes root IDs like ROOT_QUERY and ROOT_MUTATION, their\n // retainment counts are effectively ignored because cache.gc() always\n // includes them in its root ID set.\n store.retain(ref.__ref);\n\n return ref;\n }\n\n private processSelectionSet({\n dataId,\n result,\n selectionSet,\n context,\n // This object allows processSelectionSet to report useful information\n // to its callers without explicitly returning that information.\n mergeTree,\n }: ProcessSelectionSetOptions): StoreObject | Reference {\n const { policies } = this.cache;\n\n // This variable will be repeatedly updated using context.merge to\n // accumulate all fields that need to be written into the store.\n let incoming: StoreObject = Object.create(null);\n\n // If typename was not passed in, infer it. Note that typename is\n // always passed in for tricky-to-infer cases such as \"Query\" for\n // ROOT_QUERY.\n const typename: string | undefined =\n (dataId && policies.rootTypenamesById[dataId]) ||\n getTypenameFromResult(result, selectionSet, context.fragmentMap) ||\n (dataId && context.store.get(dataId, \"__typename\") as string);\n\n if (\"string\" === typeof typename) {\n incoming.__typename = typename;\n }\n\n // This readField function will be passed as context.readField in the\n // KeyFieldsContext object created within policies.identify (called below).\n // In addition to reading from the existing context.store (thanks to the\n // policies.readField(options, context) line at the very bottom), this\n // version of readField can read from Reference objects that are currently\n // pending in context.incomingById, which is important whenever keyFields\n // need to be extracted from a child object that processSelectionSet has\n // turned into a Reference.\n const readField: ReadFieldFunction = function (this: void) {\n const options = normalizeReadFieldOptions(\n arguments,\n incoming,\n context.variables,\n );\n\n if (isReference(options.from)) {\n const info = context.incomingById.get(options.from.__ref);\n if (info) {\n const result = policies.readField({\n ...options,\n from: info.storeObject\n }, context);\n\n if (result !== void 0) {\n return result;\n }\n }\n }\n\n return policies.readField(options, context);\n };\n\n const fieldNodeSet = new Set();\n\n this.flattenFields(\n selectionSet,\n result,\n // This WriteContext will be the default context value for fields returned\n // by the flattenFields method, but some fields may be assigned a modified\n // context, depending on the presence of @client and other directives.\n context,\n typename,\n ).forEach((context, field) => {\n const resultFieldKey = resultKeyNameFromField(field);\n const value = result[resultFieldKey];\n\n fieldNodeSet.add(field);\n\n if (value !== void 0) {\n const storeFieldName = policies.getStoreFieldName({\n typename,\n fieldName: field.name.value,\n field,\n variables: context.variables,\n });\n\n const childTree = getChildMergeTree(mergeTree, storeFieldName);\n\n let incomingValue = this.processFieldValue(\n value,\n field,\n // Reset context.clientOnly and context.deferred to their default\n // values before processing nested selection sets.\n field.selectionSet\n ? 
getContextFlavor(context, false, false)\n : context,\n childTree,\n );\n\n // To determine if this field holds a child object with a merge function\n // defined in its type policy (see PR #7070), we need to figure out the\n // child object's __typename.\n let childTypename: string | undefined;\n\n // The field's value can be an object that has a __typename only if the\n // field has a selection set. Otherwise incomingValue is scalar.\n if (field.selectionSet &&\n (isReference(incomingValue) ||\n storeValueIsStoreObject(incomingValue))) {\n childTypename = readField(\"__typename\", incomingValue);\n }\n\n const merge = policies.getMergeFunction(\n typename,\n field.name.value,\n childTypename,\n );\n\n if (merge) {\n childTree.info = {\n // TODO Check compatibility against any existing childTree.field?\n field,\n typename,\n merge,\n };\n } else {\n maybeRecycleChildMergeTree(mergeTree, storeFieldName);\n }\n\n incoming = context.merge(incoming, {\n [storeFieldName]: incomingValue,\n });\n\n } else if (\n __DEV__ &&\n !context.clientOnly &&\n !context.deferred &&\n !addTypenameToDocument.added(field) &&\n // If the field has a read function, it may be a synthetic field or\n // provide a default value, so its absence from the written data should\n // not be cause for alarm.\n !policies.getReadFunction(typename, field.name.value)\n ) {\n invariant.error(`Missing field '${\n resultKeyNameFromField(field)\n }' while writing result ${\n JSON.stringify(result, null, 2)\n }`.substring(0, 1000));\n }\n });\n\n // Identify the result object, even if dataId was already provided,\n // since we always need keyObject below.\n try {\n const [id, keyObject] = policies.identify(result, {\n typename,\n selectionSet,\n fragmentMap: context.fragmentMap,\n storeObject: incoming,\n readField,\n });\n\n // If dataId was not provided, fall back to the id just generated by\n // policies.identify.\n dataId = dataId || id;\n\n // Write any key fields that were used during identification, even if\n // they were not mentioned in the original query.\n if (keyObject) {\n // TODO Reverse the order of the arguments?\n incoming = context.merge(incoming, keyObject);\n }\n } catch (e) {\n // If dataId was provided, tolerate failure of policies.identify.\n if (!dataId) throw e;\n }\n\n if (\"string\" === typeof dataId) {\n const dataRef = makeReference(dataId);\n\n // Avoid processing the same entity object using the same selection\n // set more than once. 
We use an array instead of a Set since most\n // entity IDs will be written using only one selection set, so the\n // size of this array is likely to be very small, meaning indexOf is\n // likely to be faster than Set.prototype.has.\n const sets = context.written[dataId] || (context.written[dataId] = []);\n if (sets.indexOf(selectionSet) >= 0) return dataRef;\n sets.push(selectionSet);\n\n // If we're about to write a result object into the store, but we\n // happen to know that the exact same (===) result object would be\n // returned if we were to reread the result with the same inputs,\n // then we can skip the rest of the processSelectionSet work for\n // this object, and immediately return a Reference to it.\n if (this.reader && this.reader.isFresh(\n result,\n dataRef,\n selectionSet,\n context,\n )) {\n return dataRef;\n }\n\n const previous = context.incomingById.get(dataId);\n if (previous) {\n previous.storeObject = context.merge(previous.storeObject, incoming);\n previous.mergeTree = mergeMergeTrees(previous.mergeTree, mergeTree);\n fieldNodeSet.forEach(field => previous.fieldNodeSet.add(field));\n } else {\n context.incomingById.set(dataId, {\n storeObject: incoming,\n // Save a reference to mergeTree only if it is not empty, because\n // empty MergeTrees may be recycled by maybeRecycleChildMergeTree and\n // reused for entirely different parts of the result tree.\n mergeTree: mergeTreeIsEmpty(mergeTree) ? void 0 : mergeTree,\n fieldNodeSet,\n });\n }\n\n return dataRef;\n }\n\n return incoming;\n }\n\n private processFieldValue(\n value: any,\n field: FieldNode,\n context: WriteContext,\n mergeTree: MergeTree,\n ): StoreValue {\n if (!field.selectionSet || value === null) {\n // In development, we need to clone scalar values so that they can be\n // safely frozen with maybeDeepFreeze in readFromStore.ts. In production,\n // it's cheaper to store the scalar values directly in the cache.\n return __DEV__ ? cloneDeep(value) : value;\n }\n\n if (isArray(value)) {\n return value.map((item, i) => {\n const value = this.processFieldValue(\n item, field, context, getChildMergeTree(mergeTree, i));\n maybeRecycleChildMergeTree(mergeTree, i);\n return value;\n });\n }\n\n return this.processSelectionSet({\n result: value,\n selectionSet: field.selectionSet,\n context,\n mergeTree,\n });\n }\n\n // Implements https://spec.graphql.org/draft/#sec-Field-Collection, but with\n // some additions for tracking @client and @defer directives.\n private flattenFields>(\n selectionSet: SelectionSetNode,\n result: Record,\n context: TContext,\n typename = getTypenameFromResult(result, selectionSet, context.fragmentMap),\n ): Map {\n const fieldMap = new Map();\n const { policies } = this.cache;\n\n const limitingTrie = new Trie<{\n // Tracks whether (selectionSet, clientOnly, deferred) has been flattened\n // before. 
The GraphQL specification only uses the fragment name for\n // skipping previously visited fragments, but the top-level fragment\n // selection set corresponds 1:1 with the fagment name (and is slightly\n // easier too work with), and we need to consider clientOnly and deferred\n // values as well, potentially revisiting selection sets that were\n // previously visited with different inherited configurations of those\n // directives.\n visited?: boolean;\n }>(false); // No need for WeakMap, since limitingTrie does not escape.\n\n (function flatten(\n this: void,\n selectionSet: SelectionSetNode,\n inheritedContext: TContext,\n ) {\n const visitedNode = limitingTrie.lookup(\n selectionSet,\n // Because we take inheritedClientOnly and inheritedDeferred into\n // consideration here (in addition to selectionSet), it's possible for\n // the same selection set to be flattened more than once, if it appears\n // in the query with different @client and/or @directive configurations.\n inheritedContext.clientOnly,\n inheritedContext.deferred,\n );\n if (visitedNode.visited) return;\n visitedNode.visited = true;\n\n selectionSet.selections.forEach(selection => {\n if (!shouldInclude(selection, context.variables)) return;\n\n let { clientOnly, deferred } = inheritedContext;\n if (\n // Since the presence of @client or @defer on this field can only\n // cause clientOnly or deferred to become true, we can skip the\n // forEach loop if both clientOnly and deferred are already true.\n !(clientOnly && deferred) &&\n isNonEmptyArray(selection.directives)\n ) {\n selection.directives.forEach(dir => {\n const name = dir.name.value;\n if (name === \"client\") clientOnly = true;\n if (name === \"defer\") {\n const args = argumentsObjectFromField(dir, context.variables);\n // The @defer directive takes an optional args.if boolean\n // argument, similar to @include(if: boolean). 
Note that\n // @defer(if: false) does not make context.deferred false, but\n // instead behaves as if there was no @defer directive.\n if (!args || (args as { if?: boolean }).if !== false) {\n deferred = true;\n }\n // TODO In the future, we may want to record args.label using\n // context.deferred, if a label is specified.\n }\n });\n }\n\n if (isField(selection)) {\n const existing = fieldMap.get(selection);\n if (existing) {\n // If this field has been visited along another recursive path\n // before, the final context should have clientOnly or deferred set\n // to true only if *all* paths have the directive (hence the &&).\n clientOnly = clientOnly && existing.clientOnly;\n deferred = deferred && existing.deferred;\n }\n\n fieldMap.set(\n selection,\n getContextFlavor(context, clientOnly, deferred),\n );\n\n } else {\n const fragment = getFragmentFromSelection(\n selection,\n context.lookupFragment,\n );\n\n if (!fragment && selection.kind === Kind.FRAGMENT_SPREAD) {\n throw new InvariantError(`No fragment named ${selection.name.value}`);\n }\n\n if (fragment &&\n policies.fragmentMatches(\n fragment, typename, result, context.variables)) {\n\n flatten(\n fragment.selectionSet,\n getContextFlavor(context, clientOnly, deferred),\n );\n }\n }\n });\n })(selectionSet, context);\n\n return fieldMap;\n }\n\n private applyMerges(\n mergeTree: MergeTree,\n existing: StoreValue,\n incoming: T,\n context: WriteContext,\n getStorageArgs?: Parameters,\n ): T | Reference {\n if (mergeTree.map.size && !isReference(incoming)) {\n const e: StoreObject | Reference | undefined = (\n // Items in the same position in different arrays are not\n // necessarily related to each other, so when incoming is an array\n // we process its elements as if there was no existing data.\n !isArray(incoming) &&\n // Likewise, existing must be either a Reference or a StoreObject\n // in order for its fields to be safe to merge with the fields of\n // the incoming object.\n (isReference(existing) || storeValueIsStoreObject(existing))\n ) ? existing : void 0;\n\n // This narrowing is implied by mergeTree.map.size > 0 and\n // !isReference(incoming), though TypeScript understandably cannot\n // hope to infer this type.\n const i = incoming as StoreObject | StoreValue[];\n\n // The options.storage objects provided to read and merge functions\n // are derived from the identity of the parent object plus a\n // sequence of storeFieldName strings/numbers identifying the nested\n // field name path of each field value to be merged.\n if (e && !getStorageArgs) {\n getStorageArgs = [isReference(e) ? e.__ref : e];\n }\n\n // It's possible that applying merge functions to this subtree will\n // not change the incoming data, so this variable tracks the fields\n // that did change, so we can create a new incoming object when (and\n // only when) at least one incoming field has changed. We use a Map\n // to preserve the type of numeric keys.\n let changedFields: Map | undefined;\n\n const getValue = (\n from: typeof e | typeof i,\n name: string | number,\n ): StoreValue => {\n return isArray(from)\n ? (typeof name === \"number\" ? 
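/*
 * A hypothetical operation showing how the clientOnly and deferred flavors
 * get set while flattening fields (the schema and field names here are
 * assumptions for illustration):
 *
 *   import { gql } from "@apollo/client";
 *
 *   const NOTE_QUERY = gql`
 *     query Note($full: Boolean!) {
 *       note {
 *         id
 *         draft @client              # flattened with clientOnly: true
 *         ... @defer(if: $full) {    # deferred: true unless $full is false
 *           history { editedAt }
 *         }
 *       }
 *     }
 *   `;
 */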
from[name] : void 0)\n : context.store.getFieldValue(from, String(name))\n };\n\n mergeTree.map.forEach((childTree, storeFieldName) => {\n const eVal = getValue(e, storeFieldName);\n const iVal = getValue(i, storeFieldName);\n // If we have no incoming data, leave any existing data untouched.\n if (void 0 === iVal) return;\n if (getStorageArgs) {\n getStorageArgs.push(storeFieldName);\n }\n const aVal = this.applyMerges(\n childTree,\n eVal,\n iVal,\n context,\n getStorageArgs,\n );\n if (aVal !== iVal) {\n changedFields = changedFields || new Map;\n changedFields.set(storeFieldName, aVal);\n }\n if (getStorageArgs) {\n invariant(getStorageArgs.pop() === storeFieldName);\n }\n });\n\n if (changedFields) {\n // Shallow clone i so we can add changed fields to it.\n incoming = (isArray(i) ? i.slice(0) : { ...i }) as T;\n changedFields.forEach((value, name) => {\n (incoming as any)[name] = value;\n });\n }\n }\n\n if (mergeTree.info) {\n return this.cache.policies.runMergeFunction(\n existing,\n incoming,\n mergeTree.info,\n context,\n getStorageArgs && context.store.getStorage(...getStorageArgs),\n );\n }\n\n return incoming;\n }\n}\n\nconst emptyMergeTreePool: MergeTree[] = [];\n\nfunction getChildMergeTree(\n { map }: MergeTree,\n name: string | number,\n): MergeTree {\n if (!map.has(name)) {\n map.set(name, emptyMergeTreePool.pop() || { map: new Map });\n }\n return map.get(name)!;\n}\n\nfunction mergeMergeTrees(\n left: MergeTree | undefined,\n right: MergeTree | undefined,\n): MergeTree {\n if (left === right || !right || mergeTreeIsEmpty(right)) return left!;\n if (!left || mergeTreeIsEmpty(left)) return right;\n\n const info = left.info && right.info ? {\n ...left.info,\n ...right.info,\n } : left.info || right.info;\n\n const needToMergeMaps = left.map.size && right.map.size;\n const map = needToMergeMaps ? new Map :\n left.map.size ? 
left.map : right.map;\n\n const merged = { info, map };\n\n if (needToMergeMaps) {\n const remainingRightKeys = new Set(right.map.keys());\n\n left.map.forEach((leftTree, key) => {\n merged.map.set(\n key,\n mergeMergeTrees(leftTree, right.map.get(key)),\n );\n remainingRightKeys.delete(key);\n });\n\n remainingRightKeys.forEach(key => {\n merged.map.set(\n key,\n mergeMergeTrees(\n right.map.get(key),\n left.map.get(key),\n ),\n );\n });\n }\n\n return merged;\n}\n\nfunction mergeTreeIsEmpty(tree: MergeTree | undefined): boolean {\n return !tree || !(tree.info || tree.map.size);\n}\n\nfunction maybeRecycleChildMergeTree(\n { map }: MergeTree,\n name: string | number,\n) {\n const childTree = map.get(name);\n if (childTree && mergeTreeIsEmpty(childTree)) {\n emptyMergeTreePool.push(childTree);\n map.delete(name);\n }\n}\n\nconst warnings = new Set();\n\n// Note that this function is unused in production, and thus should be\n// pruned by any well-configured minifier.\nfunction warnAboutDataLoss(\n existingRef: Reference,\n incomingObj: StoreObject,\n storeFieldName: string,\n store: NormalizedCache,\n) {\n const getChild = (objOrRef: StoreObject | Reference): StoreObject | false => {\n const child = store.getFieldValue(objOrRef, storeFieldName);\n return typeof child === \"object\" && child;\n };\n\n const existing = getChild(existingRef);\n if (!existing) return;\n\n const incoming = getChild(incomingObj);\n if (!incoming) return;\n\n // It's always safe to replace a reference, since it refers to data\n // safely stored elsewhere.\n if (isReference(existing)) return;\n\n // If the values are structurally equivalent, we do not need to worry\n // about incoming replacing existing.\n if (equal(existing, incoming)) return;\n\n // If we're replacing every key of the existing object, then the\n // existing data would be overwritten even if the objects were\n // normalized, so warning would not be helpful here.\n if (Object.keys(existing).every(\n key => store.getFieldValue(incoming, key) !== void 0)) {\n return;\n }\n\n const parentType =\n store.getFieldValue(existingRef, \"__typename\") ||\n store.getFieldValue(incomingObj, \"__typename\");\n const fieldName = fieldNameFromStoreName(storeFieldName);\n const typeDotName = `${parentType}.${fieldName}`;\n // Avoid warning more than once for the same type and field name.\n if (warnings.has(typeDotName)) return;\n warnings.add(typeDotName);\n\n const childTypenames: string[] = [];\n // Arrays do not have __typename fields, and always need a custom merge\n // function, even if their elements are normalized entities.\n if (!isArray(existing) &&\n !isArray(incoming)) {\n [existing, incoming].forEach(child => {\n const typename = store.getFieldValue(child, \"__typename\");\n if (typeof typename === \"string\" &&\n !childTypenames.includes(typename)) {\n childTypenames.push(typename);\n }\n });\n }\n\n invariant.warn(\n`Cache data may be lost when replacing the ${fieldName} field of a ${parentType} object.\n\nTo address this problem (which is not a bug in Apollo Client), ${\n childTypenames.length\n ? 
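/*
 * The two remedies this warning suggests, sketched with hypothetical
 * Person/Address types (names are illustrative, not part of this module):
 *
 *   import { InMemoryCache } from "@apollo/client";
 *
 *   const cache = new InMemoryCache({
 *     typePolicies: {
 *       // Remedy 1: give the child type a primary key so it is normalized
 *       // into its own entity instead of being embedded and replaced.
 *       Address: { keyFields: ["postalCode", "street"] },
 *       Person: {
 *         fields: {
 *           // Remedy 2: merge: true is shorthand for the mergeTrueFn handled
 *           // above, which combines existing and incoming via mergeObjects.
 *           address: { merge: true },
 *         },
 *       },
 *     },
 *   });
 */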
\"either ensure all objects of type \" +\n childTypenames.join(\" and \") + \" have an ID or a custom merge function, or \"\n : \"\"\n}define a custom merge function for the ${\n typeDotName\n} field, so InMemoryCache can safely merge these objects:\n\n existing: ${JSON.stringify(existing).slice(0, 1000)}\n incoming: ${JSON.stringify(incoming).slice(0, 1000)}\n\nFor more information about these options, please refer to the documentation:\n\n * Ensuring entity objects have IDs: https://go.apollo.dev/c/generating-unique-identifiers\n * Defining custom merge functions: https://go.apollo.dev/c/merging-non-normalized-objects\n`);\n}\n","import { invariant } from '../../utilities/globals';\n\n// Make builtins like Map and Set safe to use with non-extensible objects.\nimport './fixPolyfills';\n\nimport { DocumentNode } from 'graphql';\nimport { OptimisticWrapperFunction, wrap } from 'optimism';\nimport { equal } from '@wry/equality';\n\nimport { ApolloCache } from '../core/cache';\nimport { Cache } from '../core/types/Cache';\nimport { MissingFieldError } from '../core/types/common';\nimport {\n addTypenameToDocument,\n StoreObject,\n Reference,\n isReference,\n} from '../../utilities';\nimport { InMemoryCacheConfig, NormalizedCacheObject } from './types';\nimport { StoreReader } from './readFromStore';\nimport { StoreWriter } from './writeToStore';\nimport { EntityStore, supportsResultCaching } from './entityStore';\nimport { makeVar, forgetCache, recallCache } from './reactiveVars';\nimport { Policies } from './policies';\nimport { hasOwn, normalizeConfig, shouldCanonizeResults } from './helpers';\nimport { canonicalStringify } from './object-canon';\n\ntype BroadcastOptions = Pick<\n Cache.BatchOptions,\n | \"optimistic\"\n | \"onWatchUpdated\"\n>\n\nexport class InMemoryCache extends ApolloCache {\n private data: EntityStore;\n private optimisticData: EntityStore;\n\n protected config: InMemoryCacheConfig;\n private watches = new Set();\n private addTypename: boolean;\n\n private typenameDocumentCache = new Map();\n private storeReader: StoreReader;\n private storeWriter: StoreWriter;\n\n private maybeBroadcastWatch: OptimisticWrapperFunction<\n [Cache.WatchOptions, BroadcastOptions?],\n any,\n [Cache.WatchOptions]>;\n\n // Dynamically imported code can augment existing typePolicies or\n // possibleTypes by calling cache.policies.addTypePolicies or\n // cache.policies.addPossibletypes.\n public readonly policies: Policies;\n\n public readonly makeVar = makeVar;\n\n constructor(config: InMemoryCacheConfig = {}) {\n super();\n this.config = normalizeConfig(config);\n this.addTypename = !!this.config.addTypename;\n\n this.policies = new Policies({\n cache: this,\n dataIdFromObject: this.config.dataIdFromObject,\n possibleTypes: this.config.possibleTypes,\n typePolicies: this.config.typePolicies,\n });\n\n this.init();\n }\n\n private init() {\n // Passing { resultCaching: false } in the InMemoryCache constructor options\n // will completely disable dependency tracking, which will improve memory\n // usage but worsen the performance of repeated reads.\n const rootStore = this.data = new EntityStore.Root({\n policies: this.policies,\n resultCaching: this.config.resultCaching,\n });\n\n // When no optimistic writes are currently active, cache.optimisticData ===\n // cache.data, so there are no additional layers on top of the actual data.\n // When an optimistic update happens, this.optimisticData will become a\n // linked list of EntityStore Layer objects that terminates with the\n // original 
this.data cache object.\n this.optimisticData = rootStore.stump;\n\n this.resetResultCache();\n }\n\n private resetResultCache(resetResultIdentities?: boolean) {\n const previousReader = this.storeReader;\n const { fragments } = this.config;\n\n // The StoreWriter is mostly stateless and so doesn't really need to be\n // reset, but it does need to have its writer.storeReader reference updated,\n // so it's simpler to update this.storeWriter as well.\n this.storeWriter = new StoreWriter(\n this,\n this.storeReader = new StoreReader({\n cache: this,\n addTypename: this.addTypename,\n resultCacheMaxSize: this.config.resultCacheMaxSize,\n canonizeResults: shouldCanonizeResults(this.config),\n canon: resetResultIdentities\n ? void 0\n : previousReader && previousReader.canon,\n fragments,\n }),\n fragments,\n );\n\n this.maybeBroadcastWatch = wrap((\n c: Cache.WatchOptions,\n options?: BroadcastOptions,\n ) => {\n return this.broadcastWatch(c, options);\n }, {\n max: this.config.resultCacheMaxSize,\n makeCacheKey: (c: Cache.WatchOptions) => {\n // Return a cache key (thus enabling result caching) only if we're\n // currently using a data store that can track cache dependencies.\n const store = c.optimistic ? this.optimisticData : this.data;\n if (supportsResultCaching(store)) {\n const { optimistic, id, variables } = c;\n return store.makeCacheKey(\n c.query,\n // Different watches can have the same query, optimistic\n // status, rootId, and variables, but if their callbacks are\n // different, the (identical) result needs to be delivered to\n // each distinct callback. The easiest way to achieve that\n // separation is to include c.callback in the cache key for\n // maybeBroadcastWatch calls. See issue #5733.\n c.callback,\n canonicalStringify({ optimistic, id, variables }),\n );\n }\n }\n });\n\n // Since we have thrown away all the cached functions that depend on the\n // CacheGroup dependencies maintained by EntityStore, we should also reset\n // all CacheGroup dependency information.\n new Set([\n this.data.group,\n this.optimisticData.group,\n ]).forEach(group => group.resetCaching());\n }\n\n public restore(data: NormalizedCacheObject): this {\n this.init();\n // Since calling this.init() discards/replaces the entire StoreReader, along\n // with the result caches it maintains, this.data.replace(data) won't have\n // to bother deleting the old data.\n if (data) this.data.replace(data);\n return this;\n }\n\n public extract(optimistic: boolean = false): NormalizedCacheObject {\n return (optimistic ? this.optimisticData : this.data).extract();\n }\n\n public read(options: Cache.ReadOptions): T | null {\n const {\n // Since read returns data or null, without any additional metadata\n // about whether/where there might have been missing fields, the\n // default behavior cannot be returnPartialData = true (like it is\n // for the diff method), since defaulting to true would violate the\n // integrity of the T in the return type. However, partial data may\n // be useful in some cases, so returnPartialData:true may be\n // specified explicitly.\n returnPartialData = false,\n } = options;\n try {\n return this.storeReader.diffQueryAgainstStore({\n ...options,\n store: options.optimistic ? 
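/*
 * A short usage sketch for this read method (the query and field names are
 * hypothetical): by default, missing fields make read() return null, while
 * returnPartialData: true yields whatever subset is in the store.
 *
 *   import { gql, InMemoryCache } from "@apollo/client";
 *
 *   const cache = new InMemoryCache();
 *   const PROFILE = gql`query Profile { me { id name email } }`;
 *
 *   const complete = cache.read({ query: PROFILE, optimistic: false });
 *   const partial = cache.read({
 *     query: PROFILE,
 *     optimistic: false,
 *     returnPartialData: true,
 *   });
 */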
this.optimisticData : this.data,\n config: this.config,\n returnPartialData,\n }).result || null;\n } catch (e) {\n if (e instanceof MissingFieldError) {\n // Swallow MissingFieldError and return null, so callers do not need to\n // worry about catching \"normal\" exceptions resulting from incomplete\n // cache data. Unexpected errors will be re-thrown. If you need more\n // information about which fields were missing, use cache.diff instead,\n // and examine diffResult.missing.\n return null;\n }\n throw e;\n }\n }\n\n public write(options: Cache.WriteOptions): Reference | undefined {\n try {\n ++this.txCount;\n return this.storeWriter.writeToStore(this.data, options);\n } finally {\n if (!--this.txCount && options.broadcast !== false) {\n this.broadcastWatches();\n }\n }\n }\n\n public modify(options: Cache.ModifyOptions): boolean {\n if (hasOwn.call(options, \"id\") && !options.id) {\n // To my knowledge, TypeScript does not currently provide a way to\n // enforce that an optional property?:type must *not* be undefined\n // when present. That ability would be useful here, because we want\n // options.id to default to ROOT_QUERY only when no options.id was\n // provided. If the caller attempts to pass options.id with a\n // falsy/undefined value (perhaps because cache.identify failed), we\n // should not assume the goal was to modify the ROOT_QUERY object.\n // We could throw, but it seems natural to return false to indicate\n // that nothing was modified.\n return false;\n }\n const store = options.optimistic // Defaults to false.\n ? this.optimisticData\n : this.data;\n try {\n ++this.txCount;\n return store.modify(options.id || \"ROOT_QUERY\", options.fields);\n } finally {\n if (!--this.txCount && options.broadcast !== false) {\n this.broadcastWatches();\n }\n }\n }\n\n public diff(\n options: Cache.DiffOptions,\n ): Cache.DiffResult {\n return this.storeReader.diffQueryAgainstStore({\n ...options,\n store: options.optimistic ? this.optimisticData : this.data,\n rootId: options.id || \"ROOT_QUERY\",\n config: this.config,\n });\n }\n\n public watch(\n watch: Cache.WatchOptions,\n ): () => void {\n if (!this.watches.size) {\n // In case we previously called forgetCache(this) because\n // this.watches became empty (see below), reattach this cache to any\n // reactive variables on which it previously depended. It might seem\n // paradoxical that we're able to recall something we supposedly\n // forgot, but the point of calling forgetCache(this) is to silence\n // useless broadcasts while this.watches is empty, and to allow the\n // cache to be garbage collected. 
If, however, we manage to call\n // recallCache(this) here, this cache object must not have been\n // garbage collected yet, and should resume receiving updates from\n // reactive variables, now that it has a watcher to notify.\n recallCache(this);\n }\n this.watches.add(watch);\n if (watch.immediate) {\n this.maybeBroadcastWatch(watch);\n }\n return () => {\n // Once we remove the last watch from this.watches, cache.broadcastWatches\n // no longer does anything, so we preemptively tell the reactive variable\n // system to exclude this cache from future broadcasts.\n if (this.watches.delete(watch) && !this.watches.size) {\n forgetCache(this);\n }\n // Remove this watch from the LRU cache managed by the\n // maybeBroadcastWatch OptimisticWrapperFunction, to prevent memory\n // leaks involving the closure of watch.callback.\n this.maybeBroadcastWatch.forget(watch);\n };\n }\n\n public gc(options?: {\n // If true, also free non-essential result cache memory by bulk-releasing\n // this.{store{Reader,Writer},maybeBroadcastWatch}. Defaults to false.\n resetResultCache?: boolean;\n // If resetResultCache is true, this.storeReader.canon will be preserved by\n // default, but can also be discarded by passing resetResultIdentities:true.\n // Defaults to false.\n resetResultIdentities?: boolean;\n }) {\n canonicalStringify.reset();\n const ids = this.optimisticData.gc();\n if (options && !this.txCount) {\n if (options.resetResultCache) {\n this.resetResultCache(options.resetResultIdentities);\n } else if (options.resetResultIdentities) {\n this.storeReader.resetCanon();\n }\n }\n return ids;\n }\n\n // Call this method to ensure the given root ID remains in the cache after\n // garbage collection, along with its transitive child entities. Note that\n // the cache automatically retains all directly written entities. By default,\n // the retainment persists after optimistic updates are removed. Pass true\n // for the optimistic argument if you would prefer for the retainment to be\n // discarded when the top-most optimistic layer is removed. Returns the\n // resulting (non-negative) retainment count.\n public retain(rootId: string, optimistic?: boolean): number {\n return (optimistic ? this.optimisticData : this.data).retain(rootId);\n }\n\n // Call this method to undo the effect of the retain method, above. Once the\n // retainment count falls to zero, the given ID will no longer be preserved\n // during garbage collection, though it may still be preserved by other safe\n // entities that refer to it. Returns the resulting (non-negative) retainment\n // count, in case that's useful.\n public release(rootId: string, optimistic?: boolean): number {\n return (optimistic ? this.optimisticData : this.data).release(rootId);\n }\n\n // Returns the canonical ID for a given StoreObject, obeying typePolicies\n // and keyFields (and dataIdFromObject, if you still use that). At minimum,\n // the object must contain a __typename and any primary key fields required\n // to identify entities of that type. 
If you pass a query result object, be\n // sure that none of the primary key fields have been renamed by aliasing.\n // If you pass a Reference object, its __ref ID string will be returned.\n public identify(object: StoreObject | Reference): string | undefined {\n if (isReference(object)) return object.__ref;\n try {\n return this.policies.identify(object)[0];\n } catch (e) {\n invariant.warn(e);\n }\n }\n\n public evict(options: Cache.EvictOptions): boolean {\n if (!options.id) {\n if (hasOwn.call(options, \"id\")) {\n // See comment in modify method about why we return false when\n // options.id exists but is falsy/undefined.\n return false;\n }\n options = { ...options, id: \"ROOT_QUERY\" };\n }\n try {\n // It's unlikely that the eviction will end up invoking any other\n // cache update operations while it's running, but {in,de}crementing\n // this.txCount still seems like a good idea, for uniformity with\n // the other update methods.\n ++this.txCount;\n // Pass this.data as a limit on the depth of the eviction, so evictions\n // during optimistic updates (when this.data is temporarily set equal to\n // this.optimisticData) do not escape their optimistic Layer.\n return this.optimisticData.evict(options, this.data);\n } finally {\n if (!--this.txCount && options.broadcast !== false) {\n this.broadcastWatches();\n }\n }\n }\n\n public reset(options?: Cache.ResetOptions): Promise {\n this.init();\n\n canonicalStringify.reset();\n\n if (options && options.discardWatches) {\n // Similar to what happens in the unsubscribe function returned by\n // cache.watch, applied to all current watches.\n this.watches.forEach(watch => this.maybeBroadcastWatch.forget(watch));\n this.watches.clear();\n forgetCache(this);\n } else {\n // Calling this.init() above unblocks all maybeBroadcastWatch caching, so\n // this.broadcastWatches() triggers a broadcast to every current watcher\n // (letting them know their data is now missing). This default behavior is\n // convenient because it means the watches do not have to be manually\n // reestablished after resetting the cache. To prevent this broadcast and\n // cancel all watches, pass true for options.discardWatches.\n this.broadcastWatches();\n }\n\n return Promise.resolve();\n }\n\n public removeOptimistic(idToRemove: string) {\n const newOptimisticData = this.optimisticData.removeLayer(idToRemove);\n if (newOptimisticData !== this.optimisticData) {\n this.optimisticData = newOptimisticData;\n this.broadcastWatches();\n }\n }\n\n private txCount = 0;\n\n public batch(\n options: Cache.BatchOptions,\n ): TUpdateResult {\n const {\n update,\n optimistic = true,\n removeOptimistic,\n onWatchUpdated,\n } = options;\n\n let updateResult: TUpdateResult;\n const perform = (layer?: EntityStore): TUpdateResult => {\n const { data, optimisticData } = this;\n ++this.txCount;\n if (layer) {\n this.data = this.optimisticData = layer;\n }\n try {\n return updateResult = update(this);\n } finally {\n --this.txCount;\n this.data = data;\n this.optimisticData = optimisticData;\n }\n };\n\n const alreadyDirty = new Set();\n\n if (onWatchUpdated && !this.txCount) {\n // If an options.onWatchUpdated callback is provided, we want to call it\n // with only the Cache.WatchOptions objects affected by options.update,\n // but there might be dirty watchers already waiting to be broadcast that\n // have nothing to do with the update. 
To prevent including those watchers\n // in the post-update broadcast, we perform this initial broadcast to\n // collect the dirty watchers, so we can re-dirty them later, after the\n // post-update broadcast, allowing them to receive their pending\n // broadcasts the next time broadcastWatches is called, just as they would\n // if we never called cache.batch.\n this.broadcastWatches({\n ...options,\n onWatchUpdated(watch) {\n alreadyDirty.add(watch);\n return false;\n },\n });\n }\n\n if (typeof optimistic === 'string') {\n // Note that there can be multiple layers with the same optimistic ID.\n // When removeOptimistic(id) is called for that id, all matching layers\n // will be removed, and the remaining layers will be reapplied.\n this.optimisticData = this.optimisticData.addLayer(optimistic, perform);\n } else if (optimistic === false) {\n // Ensure both this.data and this.optimisticData refer to the root\n // (non-optimistic) layer of the cache during the update. Note that\n // this.data could be a Layer if we are currently executing an optimistic\n // update function, but otherwise will always be an EntityStore.Root\n // instance.\n perform(this.data);\n } else {\n // Otherwise, leave this.data and this.optimisticData unchanged and run\n // the update with broadcast batching.\n perform();\n }\n\n if (typeof removeOptimistic === \"string\") {\n this.optimisticData = this.optimisticData.removeLayer(removeOptimistic);\n }\n\n // Note: if this.txCount > 0, then alreadyDirty.size === 0, so this code\n // takes the else branch and calls this.broadcastWatches(options), which\n // does nothing when this.txCount > 0.\n if (onWatchUpdated && alreadyDirty.size) {\n this.broadcastWatches({\n ...options,\n onWatchUpdated(watch, diff) {\n const result = onWatchUpdated.call(this, watch, diff);\n if (result !== false) {\n // Since onWatchUpdated did not return false, this diff is\n // about to be broadcast to watch.callback, so we don't need\n // to re-dirty it with the other alreadyDirty watches below.\n alreadyDirty.delete(watch);\n }\n return result;\n }\n });\n // Silently re-dirty any watches that were already dirty before the update\n // was performed, and were not broadcast just now.\n if (alreadyDirty.size) {\n alreadyDirty.forEach(watch => this.maybeBroadcastWatch.dirty(watch));\n }\n } else {\n // If alreadyDirty is empty or we don't have an onWatchUpdated\n // function, we don't need to go to the trouble of wrapping\n // options.onWatchUpdated.\n this.broadcastWatches(options);\n }\n\n return updateResult!;\n }\n\n public performTransaction(\n update: (cache: InMemoryCache) => any,\n optimisticId?: string | null,\n ) {\n return this.batch({\n update,\n optimistic: optimisticId || (optimisticId !== null),\n });\n }\n\n public transformDocument(document: DocumentNode): DocumentNode {\n if (this.addTypename) {\n let result = this.typenameDocumentCache.get(document);\n if (!result) {\n result = addTypenameToDocument(document);\n this.typenameDocumentCache.set(document, result);\n // If someone calls transformDocument and then mistakenly passes the\n // result back into an API that also calls transformDocument, make sure\n // we don't keep creating new query documents.\n this.typenameDocumentCache.set(result, result);\n }\n return result;\n }\n return document;\n }\n\n public transformForLink(document: DocumentNode): DocumentNode {\n const { fragments } = this.config;\n return fragments\n ? 
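/*
 * A usage sketch for batch, based on the options handled above (the layer id
 * and the Todo type/field names are hypothetical):
 *
 *   cache.batch({
 *     // A string creates (and names) a new optimistic layer for the update.
 *     optimistic: "toggle-complete",
 *     update(c) {
 *       c.modify({
 *         id: c.identify({ __typename: "Todo", id: 1 }),
 *         fields: { completed: (value) => !value },
 *       });
 *     },
 *     // Returning false here suppresses the broadcast for that watcher.
 *     onWatchUpdated(watch, diff) {
 *       return diff.complete !== false;
 *     },
 *   });
 */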
fragments.transform(document)\n : document;\n }\n\n protected broadcastWatches(options?: BroadcastOptions) {\n if (!this.txCount) {\n this.watches.forEach(c => this.maybeBroadcastWatch(c, options));\n }\n }\n\n // This method is wrapped by maybeBroadcastWatch, which is called by\n // broadcastWatches, so that we compute and broadcast results only when\n // the data that would be broadcast might have changed. It would be\n // simpler to check for changes after recomputing a result but before\n // broadcasting it, but this wrapping approach allows us to skip both\n // the recomputation and the broadcast, in most cases.\n private broadcastWatch(\n c: Cache.WatchOptions,\n options?: BroadcastOptions,\n ) {\n const { lastDiff } = c;\n\n // Both WatchOptions and DiffOptions extend ReadOptions, and DiffOptions\n // currently requires no additional properties, so we can use c (a\n // WatchOptions object) as DiffOptions, without having to allocate a new\n // object, and without having to enumerate the relevant properties (query,\n // variables, etc.) explicitly. There will be some additional properties\n // (lastDiff, callback, etc.), but cache.diff ignores them.\n const diff = this.diff(c);\n\n if (options) {\n if (c.optimistic &&\n typeof options.optimistic === \"string\") {\n diff.fromOptimisticTransaction = true;\n }\n\n if (options.onWatchUpdated &&\n options.onWatchUpdated.call(this, c, diff, lastDiff) === false) {\n // Returning false from the onWatchUpdated callback will prevent\n // calling c.callback(diff) for this watcher.\n return;\n }\n }\n\n if (!lastDiff || !equal(lastDiff.result, diff.result)) {\n c.callback(c.lastDiff = diff, lastDiff);\n }\n }\n}\n","import { invariant } from '../../utilities/globals';\nimport * as React from 'react';\n\nimport { canUseLayoutEffect } from '../../utilities';\n\nlet didWarnUncachedGetSnapshot = false;\n\ntype RealUseSESHookType =\n // This import depends only on the @types/use-sync-external-store package, not\n // the actual use-sync-external-store package, which is not installed. It\n // might be nice to get this type from React 18, but it still needs to work\n // when only React 17 or earlier is installed.\n typeof import(\"use-sync-external-store\").useSyncExternalStore;\n\n// Prevent webpack from complaining about our feature detection of the\n// useSyncExternalStore property of the React namespace, which is expected not\n// to exist when using React 17 and earlier, and that's fine.\nconst uSESKey = \"useSyncExternalStore\" as keyof typeof React;\nconst realHook = React[uSESKey] as RealUseSESHookType | undefined;\n\n// Adapted from https://www.npmjs.com/package/use-sync-external-store, with\n// Apollo Client deviations called out by \"// DEVIATION ...\" comments.\n\n// When/if React.useSyncExternalStore is defined, delegate fully to it.\nexport const useSyncExternalStore: RealUseSESHookType = realHook || ((\n subscribe,\n getSnapshot,\n getServerSnapshot,\n) => {\n // Read the current snapshot from the store on every render. 
Again, this\n // breaks the rules of React, and only works here because of specific\n // implementation details, most importantly that updates are\n // always synchronous.\n const value = getSnapshot();\n if (\n // DEVIATION: Using our own __DEV__ polyfill (from ../../utilities/globals).\n __DEV__ &&\n !didWarnUncachedGetSnapshot &&\n // DEVIATION: Not using Object.is because we know our snapshots will never\n // be exotic primitive values like NaN, which is !== itself.\n value !== getSnapshot()\n ) {\n didWarnUncachedGetSnapshot = true;\n // DEVIATION: Using invariant.error instead of console.error directly.\n invariant.error(\n 'The result of getSnapshot should be cached to avoid an infinite loop',\n );\n }\n\n // Because updates are synchronous, we don't queue them. Instead we force a\n // re-render whenever the subscribed state changes by updating an some\n // arbitrary useState hook. Then, during render, we call getSnapshot to read\n // the current value.\n //\n // Because we don't actually use the state returned by the useState hook, we\n // can save a bit of memory by storing other stuff in that slot.\n //\n // To implement the early bailout, we need to track some things on a mutable\n // object. Usually, we would put that in a useRef hook, but we can stash it in\n // our useState hook instead.\n //\n // To force a re-render, we call forceUpdate({inst}). That works because the\n // new object always fails an equality check.\n const [{inst}, forceUpdate] = React.useState({inst: {value, getSnapshot}});\n\n // Track the latest getSnapshot function with a ref. This needs to be updated\n // in the layout phase so we can access it during the tearing check that\n // happens on subscribe.\n if (canUseLayoutEffect) {\n // DEVIATION: We avoid calling useLayoutEffect when !canUseLayoutEffect,\n // which may seem like a conditional hook, but this code ends up behaving\n // unconditionally (one way or the other) because canUseLayoutEffect is\n // constant.\n React.useLayoutEffect(() => {\n Object.assign(inst, { value, getSnapshot });\n // Whenever getSnapshot or subscribe changes, we need to check in the\n // commit phase if there was an interleaved mutation. In concurrent mode\n // this can happen all the time, but even in synchronous mode, an earlier\n // effect may have mutated the store.\n if (checkIfSnapshotChanged(inst)) {\n // Force a re-render.\n forceUpdate({inst});\n }\n }, [subscribe, value, getSnapshot]);\n } else {\n Object.assign(inst, { value, getSnapshot });\n }\n\n React.useEffect(() => {\n // Check for changes right before subscribing. Subsequent changes will be\n // detected in the subscription handler.\n if (checkIfSnapshotChanged(inst)) {\n // Force a re-render.\n forceUpdate({inst});\n }\n\n // Subscribe to the store and return a clean-up function.\n return subscribe(function handleStoreChange() {\n // TODO: Because there is no cross-renderer API for batching updates, it's\n // up to the consumer of this library to wrap their subscription event\n // with unstable_batchedUpdates. Should we try to detect when this isn't\n // the case and print a warning in development?\n\n // The store changed. 
Check if the snapshot changed since the last time we\n // read from the store.\n if (checkIfSnapshotChanged(inst)) {\n // Force a re-render.\n forceUpdate({inst});\n }\n });\n }, [subscribe]);\n\n return value;\n});\n\nfunction checkIfSnapshotChanged({\n value,\n getSnapshot,\n}: {\n value: Snapshot;\n getSnapshot: () => Snapshot;\n}): boolean {\n try {\n return value !== getSnapshot();\n } catch {\n return true;\n }\n}\n","import { invariant } from '../../utilities/globals';\n\nimport {\n useCallback,\n useContext,\n useMemo,\n useRef,\n useState,\n} from 'react';\nimport { useSyncExternalStore } from './useSyncExternalStore';\nimport { equal } from '@wry/equality';\n\nimport { mergeOptions, OperationVariables, WatchQueryFetchPolicy } from '../../core';\nimport { ApolloContextValue, getApolloContext } from '../context';\nimport { ApolloError } from '../../errors';\nimport {\n ApolloClient,\n ApolloQueryResult,\n NetworkStatus,\n ObservableQuery,\n DocumentNode,\n TypedDocumentNode,\n WatchQueryOptions,\n} from '../../core';\nimport {\n QueryHookOptions,\n QueryResult,\n ObservableQueryFields,\n} from '../types/types';\n\nimport { DocumentType, verifyDocumentType } from '../parser';\nimport { useApolloClient } from './useApolloClient';\nimport { canUseWeakMap, canUseWeakSet, compact, isNonEmptyArray, maybeDeepFreeze } from '../../utilities';\n\nconst {\n prototype: {\n hasOwnProperty,\n },\n} = Object;\n\nexport function useQuery<\n TData = any,\n TVariables = OperationVariables,\n>(\n query: DocumentNode | TypedDocumentNode,\n options: QueryHookOptions = Object.create(null),\n): QueryResult {\n return useInternalState(\n useApolloClient(options.client),\n query,\n ).useQuery(options);\n}\n\nexport function useInternalState(\n client: ApolloClient,\n query: DocumentNode | TypedDocumentNode,\n): InternalState {\n const stateRef = useRef>();\n if (\n !stateRef.current ||\n client !== stateRef.current.client ||\n query !== stateRef.current.query\n ) {\n stateRef.current = new InternalState(client, query, stateRef.current);\n }\n const state = stateRef.current;\n\n // By default, InternalState.prototype.forceUpdate is an empty function, but\n // we replace it here (before anyone has had a chance to see this state yet)\n // with a function that unconditionally forces an update, using the latest\n // setTick function. 
Updating this state by calling state.forceUpdate is the\n // only way we trigger React component updates (no other useState calls within\n // the InternalState class).\n const [_tick, setTick] = useState(0);\n state.forceUpdate = () => {\n setTick(tick => tick + 1);\n };\n\n return state;\n}\n\nclass InternalState {\n constructor(\n public readonly client: ReturnType,\n public readonly query: DocumentNode | TypedDocumentNode,\n previous?: InternalState,\n ) {\n verifyDocumentType(query, DocumentType.Query);\n\n // Reuse previousData from previous InternalState (if any) to provide\n // continuity of previousData even if/when the query or client changes.\n const previousResult = previous && previous.result;\n const previousData = previousResult && previousResult.data;\n if (previousData) {\n this.previousData = previousData;\n }\n }\n\n forceUpdate() {\n // Replaced (in useInternalState) with a method that triggers an update.\n invariant.warn(\"Calling default no-op implementation of InternalState#forceUpdate\");\n }\n\n asyncUpdate() {\n return new Promise>(resolve => {\n this.asyncResolveFns.add(resolve);\n this.optionsToIgnoreOnce.add(this.watchQueryOptions);\n this.forceUpdate();\n });\n }\n\n private asyncResolveFns = new Set<\n (result: QueryResult) => void\n >();\n\n private optionsToIgnoreOnce = new (canUseWeakSet ? WeakSet : Set)<\n WatchQueryOptions\n >();\n\n // Methods beginning with use- should be called according to the standard\n // rules of React hooks: only at the top level of the calling function, and\n // without any dynamic conditional logic.\n useQuery(options: QueryHookOptions) {\n // The renderPromises field gets initialized here in the useQuery method, at\n // the beginning of everything (for a given component rendering, at least),\n // so we can safely use this.renderPromises in other/later InternalState\n // methods without worrying it might be uninitialized. Even after\n // initialization, this.renderPromises is usually undefined (unless SSR is\n // happening), but that's fine as long as it has been initialized that way,\n // rather than left uninitialized.\n this.renderPromises = useContext(getApolloContext()).renderPromises;\n\n this.useOptions(options);\n\n const obsQuery = this.useObservableQuery();\n\n const result = useSyncExternalStore(\n useCallback(() => {\n if (this.renderPromises) {\n return () => {};\n }\n\n const onNext = () => {\n const previousResult = this.result;\n // We use `getCurrentResult()` instead of the onNext argument because\n // the values differ slightly. Specifically, loading results will have\n // an empty object for data instead of `undefined` for some reason.\n const result = obsQuery.getCurrentResult();\n // Make sure we're not attempting to re-render similar results\n if (\n previousResult &&\n previousResult.loading === result.loading &&\n previousResult.networkStatus === result.networkStatus &&\n equal(previousResult.data, result.data)\n ) {\n return;\n }\n\n this.setResult(result);\n };\n\n const onError = (error: Error) => {\n const last = obsQuery[\"last\"];\n subscription.unsubscribe();\n // Unfortunately, if `lastError` is set in the current\n // `observableQuery` when the subscription is re-created,\n // the subscription will immediately receive the error, which will\n // cause it to terminate again. 
To avoid this, we first clear\n // the last error/result from the `observableQuery` before re-starting\n // the subscription, and restore it afterwards (so the subscription\n // has a chance to stay open).\n try {\n obsQuery.resetLastResults();\n subscription = obsQuery.subscribe(onNext, onError);\n } finally {\n obsQuery[\"last\"] = last;\n }\n\n if (!hasOwnProperty.call(error, 'graphQLErrors')) {\n // The error is not a GraphQL error\n throw error;\n }\n\n const previousResult = this.result;\n if (\n !previousResult ||\n (previousResult && previousResult.loading) ||\n !equal(error, previousResult.error)\n ) {\n this.setResult({\n data: (previousResult && previousResult.data) as TData,\n error: error as ApolloError,\n loading: false,\n networkStatus: NetworkStatus.error,\n });\n }\n };\n\n let subscription = obsQuery.subscribe(onNext, onError);\n\n return () => subscription.unsubscribe();\n }, [\n // We memoize the subscribe function using useCallback and the following\n // dependency keys, because the subscribe function reference is all that\n // useSyncExternalStore uses internally as a dependency key for the\n // useEffect ultimately responsible for the subscription, so we are\n // effectively passing this dependency array to that useEffect buried\n // inside useSyncExternalStore, as desired.\n obsQuery,\n this.renderPromises,\n this.client.disableNetworkFetches,\n ]),\n\n () => this.getCurrentResult(),\n () => this.getCurrentResult(),\n );\n\n // TODO Remove this method when we remove support for options.partialRefetch.\n this.unsafeHandlePartialRefetch(result);\n\n const queryResult = this.toQueryResult(result);\n\n if (!queryResult.loading && this.asyncResolveFns.size) {\n this.asyncResolveFns.forEach(resolve => resolve(queryResult));\n this.asyncResolveFns.clear();\n }\n\n return queryResult;\n }\n\n // These members (except for renderPromises) are all populated by the\n // useOptions method, which is called unconditionally at the beginning of the\n // useQuery method, so we can safely use these members in other/later methods\n // without worrying they might be uninitialized.\n private renderPromises: ApolloContextValue[\"renderPromises\"];\n private queryHookOptions: QueryHookOptions;\n private watchQueryOptions: WatchQueryOptions;\n\n private useOptions(\n options: QueryHookOptions,\n ) {\n const watchQueryOptions = this.createWatchQueryOptions(\n this.queryHookOptions = options,\n );\n\n // Update this.watchQueryOptions, but only when they have changed, which\n // allows us to depend on the referential stability of\n // this.watchQueryOptions elsewhere.\n const currentWatchQueryOptions = this.watchQueryOptions;\n\n // To force this equality test to \"fail,\" thereby reliably triggering\n // observable.reobserve, add any current WatchQueryOptions object(s) you\n // want to be ignored to this.optionsToIgnoreOnce. 
A similar effect could be\n // achieved by nullifying this.watchQueryOptions so the equality test\n // immediately fails because currentWatchQueryOptions is null, but this way\n // we can promise a truthy this.watchQueryOptions at all times.\n if (\n this.optionsToIgnoreOnce.has(currentWatchQueryOptions) ||\n !equal(watchQueryOptions, currentWatchQueryOptions)\n ) {\n this.watchQueryOptions = watchQueryOptions;\n\n if (currentWatchQueryOptions && this.observable) {\n // As advertised in the -Once of this.optionsToIgnoreOnce, this trick is\n // only good for one forced execution of observable.reobserve per\n // ignored WatchQueryOptions object, though it is unlikely we will ever\n // see this exact currentWatchQueryOptions object again here, since we\n // just replaced this.watchQueryOptions with watchQueryOptions.\n this.optionsToIgnoreOnce.delete(currentWatchQueryOptions);\n\n // Though it might be tempting to postpone this reobserve call to the\n // useEffect block, we need getCurrentResult to return an appropriate\n // loading:true result synchronously (later within the same call to\n // useQuery). Since we already have this.observable here (not true for\n // the very first call to useQuery), we are not initiating any new\n // subscriptions, though it does feel less than ideal that reobserve\n // (potentially) kicks off a network request (for example, when the\n // variables have changed), which is technically a side-effect.\n this.observable.reobserve(this.getObsQueryOptions());\n\n // Make sure getCurrentResult returns a fresh ApolloQueryResult,\n // but save the current data as this.previousData, just like setResult\n // usually does.\n this.previousData = this.result?.data || this.previousData;\n this.result = void 0;\n }\n }\n\n // Make sure state.onCompleted and state.onError always reflect the latest\n // options.onCompleted and options.onError callbacks provided to useQuery,\n // since those functions are often recreated every time useQuery is called.\n // Like the forceUpdate method, the versions of these methods inherited from\n // InternalState.prototype are empty no-ops, but we can override them on the\n // base state object (without modifying the prototype).\n this.onCompleted = options.onCompleted || InternalState.prototype.onCompleted;\n this.onError = options.onError || InternalState.prototype.onError;\n\n if (\n (this.renderPromises || this.client.disableNetworkFetches) &&\n this.queryHookOptions.ssr === false &&\n !this.queryHookOptions.skip\n ) {\n // If SSR has been explicitly disabled, and this function has been called\n // on the server side, return the default loading state.\n this.result = this.ssrDisabledResult;\n } else if (\n this.queryHookOptions.skip ||\n this.watchQueryOptions.fetchPolicy === 'standby'\n ) {\n // When skipping a query (ie. we're not querying for data but still want to\n // render children), make sure the `data` is cleared out and `loading` is\n // set to `false` (since we aren't loading anything).\n //\n // NOTE: We no longer think this is the correct behavior. Skipping should\n // not automatically set `data` to `undefined`, but instead leave the\n // previous data in place. In other words, skipping should not mandate that\n // previously received data is all of a sudden removed. 
Unfortunately,\n // changing this is breaking, so we'll have to wait until Apollo Client 4.0\n // to address this.\n this.result = this.skipStandbyResult;\n } else if (\n this.result === this.ssrDisabledResult ||\n this.result === this.skipStandbyResult\n ) {\n this.result = void 0;\n }\n }\n\n private getObsQueryOptions(): WatchQueryOptions {\n const toMerge: Array<\n Partial>\n > = [];\n\n const globalDefaults = this.client.defaultOptions.watchQuery;\n if (globalDefaults) toMerge.push(globalDefaults);\n\n if (this.queryHookOptions.defaultOptions) {\n toMerge.push(this.queryHookOptions.defaultOptions);\n }\n\n // We use compact rather than mergeOptions for this part of the merge,\n // because we want watchQueryOptions.variables (if defined) to replace\n // this.observable.options.variables whole. This replacement allows\n // removing variables by removing them from the variables input to\n // useQuery. If the variables were always merged together (rather than\n // replaced), there would be no way to remove existing variables.\n // However, the variables from options.defaultOptions and globalDefaults\n // (if provided) should be merged, to ensure individual defaulted\n // variables always have values, if not otherwise defined in\n // observable.options or watchQueryOptions.\n toMerge.push(compact(\n this.observable && this.observable.options,\n this.watchQueryOptions,\n ));\n\n return toMerge.reduce(\n mergeOptions\n ) as WatchQueryOptions;\n }\n\n private ssrDisabledResult = maybeDeepFreeze({\n loading: true,\n data: void 0 as unknown as TData,\n error: void 0,\n networkStatus: NetworkStatus.loading,\n });\n\n private skipStandbyResult = maybeDeepFreeze({\n loading: false,\n data: void 0 as unknown as TData,\n error: void 0,\n networkStatus: NetworkStatus.ready,\n });\n\n // A function to massage options before passing them to ObservableQuery.\n private createWatchQueryOptions({\n skip,\n ssr,\n onCompleted,\n onError,\n defaultOptions,\n // The above options are useQuery-specific, so this ...otherOptions spread\n // makes otherOptions almost a WatchQueryOptions object, except for the\n // query property that we add below.\n ...otherOptions\n }: QueryHookOptions = {}): WatchQueryOptions {\n // This Object.assign is safe because otherOptions is a fresh ...rest object\n // that did not exist until just now, so modifications are still allowed.\n const watchQueryOptions: WatchQueryOptions =\n Object.assign(otherOptions, { query: this.query });\n\n if (\n this.renderPromises &&\n (\n watchQueryOptions.fetchPolicy === 'network-only' ||\n watchQueryOptions.fetchPolicy === 'cache-and-network'\n )\n ) {\n // this behavior was added to react-apollo without explanation in this PR\n // https://github.com/apollographql/react-apollo/pull/1579\n watchQueryOptions.fetchPolicy = 'cache-first';\n }\n\n if (!watchQueryOptions.variables) {\n watchQueryOptions.variables = {} as TVariables;\n }\n\n if (skip) {\n const {\n fetchPolicy = this.getDefaultFetchPolicy(),\n initialFetchPolicy = fetchPolicy,\n } = watchQueryOptions;\n\n // When skipping, we set watchQueryOptions.fetchPolicy initially to\n // \"standby\", but we also need/want to preserve the initial non-standby\n // fetchPolicy that would have been used if not skipping.\n Object.assign(watchQueryOptions, {\n initialFetchPolicy,\n fetchPolicy: 'standby',\n });\n } else if (!watchQueryOptions.fetchPolicy) {\n watchQueryOptions.fetchPolicy =\n this.observable?.options.initialFetchPolicy ||\n this.getDefaultFetchPolicy();\n }\n\n return 
watchQueryOptions;\n }\n\n getDefaultFetchPolicy(): WatchQueryFetchPolicy {\n return (\n this.queryHookOptions.defaultOptions?.fetchPolicy ||\n this.client.defaultOptions.watchQuery?.fetchPolicy ||\n \"cache-first\"\n );\n }\n\n // Defining these methods as no-ops on the prototype allows us to call\n // state.onCompleted and/or state.onError without worrying about whether a\n // callback was provided.\n private onCompleted(data: TData) {}\n private onError(error: ApolloError) {}\n\n private observable: ObservableQuery;\n private obsQueryFields: Omit<\n ObservableQueryFields,\n \"variables\"\n >;\n\n private useObservableQuery() {\n // See if there is an existing observable that was used to fetch the same\n // data and if so, use it instead since it will contain the proper queryId\n // to fetch the result set. This is used during SSR.\n const obsQuery = this.observable =\n this.renderPromises\n && this.renderPromises.getSSRObservable(this.watchQueryOptions)\n || this.observable // Reuse this.observable if possible (and not SSR)\n || this.client.watchQuery(this.getObsQueryOptions());\n\n this.obsQueryFields = useMemo(() => ({\n refetch: obsQuery.refetch.bind(obsQuery),\n reobserve: obsQuery.reobserve.bind(obsQuery),\n fetchMore: obsQuery.fetchMore.bind(obsQuery),\n updateQuery: obsQuery.updateQuery.bind(obsQuery),\n startPolling: obsQuery.startPolling.bind(obsQuery),\n stopPolling: obsQuery.stopPolling.bind(obsQuery),\n subscribeToMore: obsQuery.subscribeToMore.bind(obsQuery),\n }), [obsQuery]);\n\n const ssrAllowed = !(\n this.queryHookOptions.ssr === false ||\n this.queryHookOptions.skip\n );\n\n if (this.renderPromises && ssrAllowed) {\n this.renderPromises.registerSSRObservable(obsQuery);\n\n if (obsQuery.getCurrentResult().loading) {\n // TODO: This is a legacy API which could probably be cleaned up\n this.renderPromises.addObservableQueryPromise(obsQuery);\n }\n }\n\n return obsQuery;\n }\n\n // These members are populated by getCurrentResult and setResult, and it's\n // okay/normal for them to be initially undefined.\n private result: undefined | ApolloQueryResult;\n private previousData: undefined | TData;\n\n private setResult(nextResult: ApolloQueryResult) {\n const previousResult = this.result;\n if (previousResult && previousResult.data) {\n this.previousData = previousResult.data;\n }\n this.result = nextResult;\n // Calling state.setResult always triggers an update, though some call sites\n // perform additional equality checks before committing to an update.\n this.forceUpdate();\n this.handleErrorOrCompleted(nextResult);\n }\n\n private handleErrorOrCompleted(result: ApolloQueryResult) {\n if (!result.loading) {\n // wait a tick in case we are in the middle of rendering a component\n Promise.resolve().then(() => {\n if (result.error) {\n this.onError(result.error);\n } else if (result.data) {\n this.onCompleted(result.data);\n }\n }).catch(error => {\n invariant.warn(error);\n });\n }\n }\n\n private getCurrentResult(): ApolloQueryResult {\n // Using this.result as a cache ensures getCurrentResult continues returning\n // the same (===) result object, unless state.setResult has been called, or\n // we're doing server rendering and therefore override the result below.\n if (!this.result) {\n this.handleErrorOrCompleted(\n this.result = this.observable.getCurrentResult()\n );\n }\n return this.result;\n }\n\n // This cache allows the referential stability of this.result (as returned by\n // getCurrentResult) to translate into referential stability of the resulting\n // 
QueryResult object returned by toQueryResult.\n private toQueryResultCache = new (canUseWeakMap ? WeakMap : Map)<\n ApolloQueryResult,\n QueryResult\n >();\n\n toQueryResult(\n result: ApolloQueryResult,\n ): QueryResult {\n let queryResult = this.toQueryResultCache.get(result);\n if (queryResult) return queryResult;\n\n const { data, partial, ...resultWithoutPartial } = result;\n this.toQueryResultCache.set(result, queryResult = {\n data, // Ensure always defined, even if result.data is missing.\n ...resultWithoutPartial,\n ...this.obsQueryFields,\n client: this.client,\n observable: this.observable,\n variables: this.observable.variables,\n called: !this.queryHookOptions.skip,\n previousData: this.previousData,\n });\n\n if (!queryResult.error && isNonEmptyArray(result.errors)) {\n // Until a set naming convention for networkError and graphQLErrors is\n // decided upon, we map errors (graphQLErrors) to the error options.\n // TODO: Is it possible for both result.error and result.errors to be\n // defined here?\n queryResult.error = new ApolloError({ graphQLErrors: result.errors });\n }\n\n return queryResult;\n }\n\n private unsafeHandlePartialRefetch(result: ApolloQueryResult) {\n // WARNING: SIDE-EFFECTS IN THE RENDER FUNCTION\n //\n // TODO: This code should be removed when the partialRefetch option is\n // removed. I was unable to get this hook to behave reasonably in certain\n // edge cases when this block was put in an effect.\n if (\n result.partial &&\n this.queryHookOptions.partialRefetch &&\n !result.loading &&\n (!result.data || Object.keys(result.data).length === 0) &&\n this.observable.options.fetchPolicy !== 'cache-only'\n ) {\n Object.assign(result, {\n loading: true,\n networkStatus: NetworkStatus.refetch,\n });\n this.observable.refetch();\n }\n }\n}\n","import type {\n QueryOptions,\n WatchQueryOptions,\n MutationOptions,\n} from \"../../core\";\n\nimport { compact } from \"./compact\";\n\ntype OptionsUnion =\n | WatchQueryOptions\n | QueryOptions\n | MutationOptions;\n\nexport function mergeOptions<\n TOptions extends OptionsUnion\n>(\n defaults: TOptions | Partial | undefined,\n options: TOptions | Partial,\n): TOptions {\n return compact(defaults, options, options.variables && {\n variables: {\n ...(defaults && defaults.variables),\n ...options.variables,\n },\n });\n}\n","import \"../../utilities/globals\";\n\nimport { Trie } from \"@wry/trie\";\nimport {\n canUseWeakMap,\n canUseWeakSet,\n isNonNullObject as isObjectOrArray,\n} from \"../../utilities\";\nimport { isArray } from \"./helpers\";\n\nfunction shallowCopy(value: T): T {\n if (isObjectOrArray(value)) {\n return isArray(value)\n ? value.slice(0) as any as T\n : { __proto__: Object.getPrototypeOf(value), ...value };\n }\n return value;\n}\n\n// When programmers talk about the \"canonical form\" of an object, they\n// usually have the following meaning in mind, which I've copied from\n// https://en.wiktionary.org/wiki/canonical_form:\n//\n// 1. A standard or normal presentation of a mathematical entity [or\n// object]. A canonical form is an element of a set of representatives\n// of equivalence classes of forms such that there is a function or\n// procedure which projects every element of each equivalence class\n// onto that one element, the canonical form of that equivalence\n// class. 
The canonical form is expected to be simpler than the rest of\n// the forms in some way.\n//\n// That's a long-winded way of saying any two objects that have the same\n// canonical form may be considered equivalent, even if they are !==,\n// which usually means the objects are structurally equivalent (deeply\n// equal), but don't necessarily use the same memory.\n//\n// Like a literary or musical canon, this ObjectCanon class represents a\n// collection of unique canonical items (JavaScript objects), with the\n// important property that canon.admit(a) === canon.admit(b) if a and b\n// are deeply equal to each other. In terms of the definition above, the\n// canon.admit method is the \"function or procedure which projects every\"\n// object \"onto that one element, the canonical form.\"\n//\n// In the worst case, the canonicalization process may involve looking at\n// every property in the provided object tree, so it takes the same order\n// of time as deep equality checking. Fortunately, already-canonicalized\n// objects are returned immediately from canon.admit, so the presence of\n// canonical subtrees tends to speed up canonicalization.\n//\n// Since consumers of canonical objects can check for deep equality in\n// constant time, canonicalizing cache results can massively improve the\n// performance of application code that skips re-rendering unchanged\n// results, such as \"pure\" UI components in a framework like React.\n//\n// Of course, since canonical objects may be shared widely between\n// unrelated consumers, it's important to think of them as immutable, even\n// though they are not actually frozen with Object.freeze in production,\n// due to the extra performance overhead that comes with frozen objects.\n//\n// Custom scalar objects whose internal class name is neither Array nor\n// Object can be included safely in the admitted tree, but they will not\n// be replaced with a canonical version (to put it another way, they are\n// assumed to be canonical already).\n//\n// If we ignore custom objects, no detection of cycles or repeated object\n// references is currently required by the StoreReader class, since\n// GraphQL result objects are JSON-serializable trees (and thus contain\n// neither cycles nor repeated subtrees), so we can avoid the complexity\n// of keeping track of objects we've already seen during the recursion of\n// the admit method.\n//\n// In the future, we may consider adding additional cases to the switch\n// statement to handle other common object types, such as \"[object Date]\"\n// objects, as needed.\nexport class ObjectCanon {\n // Set of all canonical objects this ObjectCanon has admitted, allowing\n // canon.admit to return previously-canonicalized objects immediately.\n private known = new (canUseWeakSet ? 
WeakSet : Set)();\n\n // Efficient storage/lookup structure for canonical objects.\n private pool = new Trie<{\n array?: any[];\n object?: Record;\n keys?: SortedKeysInfo;\n }>(canUseWeakMap);\n\n public isKnown(value: any): boolean {\n return isObjectOrArray(value) && this.known.has(value);\n }\n\n // Make the ObjectCanon assume this value has already been\n // canonicalized.\n private passes = new WeakMap();\n public pass(value: T): T;\n public pass(value: any) {\n if (isObjectOrArray(value)) {\n const copy = shallowCopy(value);\n this.passes.set(copy, value);\n return copy;\n }\n return value;\n }\n\n // Returns the canonical version of value.\n public admit(value: T): T;\n public admit(value: any) {\n if (isObjectOrArray(value)) {\n const original = this.passes.get(value);\n if (original) return original;\n\n const proto = Object.getPrototypeOf(value);\n switch (proto) {\n case Array.prototype: {\n if (this.known.has(value)) return value;\n const array: any[] = (value as any[]).map(this.admit, this);\n // Arrays are looked up in the Trie using their recursively\n // canonicalized elements, and the known version of the array is\n // preserved as node.array.\n const node = this.pool.lookupArray(array);\n if (!node.array) {\n this.known.add(node.array = array);\n // Since canonical arrays may be shared widely between\n // unrelated consumers, it's important to regard them as\n // immutable, even if they are not frozen in production.\n if (__DEV__) {\n Object.freeze(array);\n }\n }\n return node.array;\n }\n\n case null:\n case Object.prototype: {\n if (this.known.has(value)) return value;\n const proto = Object.getPrototypeOf(value);\n const array = [proto];\n const keys = this.sortedKeys(value);\n array.push(keys.json);\n const firstValueIndex = array.length;\n keys.sorted.forEach(key => {\n array.push(this.admit((value as any)[key]));\n });\n // Objects are looked up in the Trie by their prototype (which\n // is *not* recursively canonicalized), followed by a JSON\n // representation of their (sorted) keys, followed by the\n // sequence of recursively canonicalized values corresponding to\n // those keys. 
To keep the final results unambiguous with other\n // sequences (such as arrays that just happen to contain [proto,\n // keys.json, value1, value2, ...]), the known version of the\n // object is stored as node.object.\n const node = this.pool.lookupArray(array);\n if (!node.object) {\n const obj = node.object = Object.create(proto);\n this.known.add(obj);\n keys.sorted.forEach((key, i) => {\n obj[key] = array[firstValueIndex + i];\n });\n // Since canonical objects may be shared widely between\n // unrelated consumers, it's important to regard them as\n // immutable, even if they are not frozen in production.\n if (__DEV__) {\n Object.freeze(obj);\n }\n }\n return node.object;\n }\n }\n }\n return value;\n }\n\n // It's worthwhile to cache the sorting of arrays of strings, since the\n // same initial unsorted arrays tend to be encountered many times.\n // Fortunately, we can reuse the Trie machinery to look up the sorted\n // arrays in linear time (which is faster than sorting large arrays).\n private sortedKeys(obj: object) {\n const keys = Object.keys(obj);\n const node = this.pool.lookupArray(keys);\n if (!node.keys) {\n keys.sort();\n const json = JSON.stringify(keys);\n if (!(node.keys = this.keysByJSON.get(json))) {\n this.keysByJSON.set(json, node.keys = { sorted: keys, json });\n }\n }\n return node.keys;\n }\n // Arrays that contain the same elements in a different order can share\n // the same SortedKeysInfo object, to save memory.\n private keysByJSON = new Map();\n\n // This has to come last because it depends on keysByJSON.\n public readonly empty = this.admit({});\n}\n\ntype SortedKeysInfo = {\n sorted: string[];\n json: string;\n};\n\n// Since the keys of canonical objects are always created in lexicographically\n// sorted order, we can use the ObjectCanon to implement a fast and stable\n// version of JSON.stringify, which automatically sorts object keys.\nexport const canonicalStringify = Object.assign(function (value: any): string {\n if (isObjectOrArray(value)) {\n if (stringifyCanon === void 0) {\n resetCanonicalStringify();\n }\n const canonical = stringifyCanon.admit(value);\n let json = stringifyCache.get(canonical);\n if (json === void 0) {\n stringifyCache.set(\n canonical,\n json = JSON.stringify(canonical),\n );\n }\n return json;\n }\n return JSON.stringify(value);\n}, {\n reset: resetCanonicalStringify,\n});\n\n// Can be reset by calling canonicalStringify.reset().\nlet stringifyCanon: ObjectCanon;\nlet stringifyCache: WeakMap;\n\nfunction resetCanonicalStringify() {\n stringifyCanon = new ObjectCanon;\n stringifyCache = new (canUseWeakMap ? 
WeakMap : Map)();\n}\n","export function maybe(thunk: () => T): T | undefined {\n try { return thunk() } catch {}\n}\n","import { DocumentNode, FragmentDefinitionNode, SelectionSetNode } from 'graphql';\n\nimport {\n NormalizedCache,\n InMemoryCacheConfig,\n} from './types';\n\nimport { KeyFieldsContext } from './policies';\nimport { FragmentRegistryAPI } from './fragmentRegistry';\n\nimport {\n Reference,\n isReference,\n StoreValue,\n StoreObject,\n isField,\n DeepMerger,\n resultKeyNameFromField,\n shouldInclude,\n isNonNullObject,\n compact,\n FragmentMap,\n FragmentMapFunction,\n createFragmentMap,\n getFragmentDefinitions,\n} from '../../utilities';\n\nexport const {\n hasOwnProperty: hasOwn,\n} = Object.prototype;\n\nexport function isNullish(value: any): value is null | undefined {\n return value === null || value === void 0;\n}\n\nexport const isArray: (a: any) => a is any[] | readonly any[] = Array.isArray;\n\nexport function defaultDataIdFromObject(\n { __typename, id, _id }: Readonly,\n context?: KeyFieldsContext,\n): string | undefined {\n if (typeof __typename === \"string\") {\n if (context) {\n context.keyObject =\n !isNullish(id) ? { id } :\n !isNullish(_id) ? { _id } :\n void 0;\n }\n\n // If there is no object.id, fall back to object._id.\n if (isNullish(id) && !isNullish(_id)) {\n id = _id;\n }\n\n if (!isNullish(id)) {\n return `${__typename}:${(\n typeof id === \"number\" ||\n typeof id === \"string\"\n ) ? id : JSON.stringify(id)}`;\n }\n }\n}\n\nconst defaultConfig = {\n dataIdFromObject: defaultDataIdFromObject,\n addTypename: true,\n resultCaching: true,\n // Thanks to the shouldCanonizeResults helper, this should be the only line\n // you have to change to reenable canonization by default in the future.\n canonizeResults: false,\n};\n\nexport function normalizeConfig(config: InMemoryCacheConfig) {\n return compact(defaultConfig, config);\n}\n\nexport function shouldCanonizeResults(\n config: Pick,\n): boolean {\n const value = config.canonizeResults;\n return value === void 0 ? defaultConfig.canonizeResults : value;\n}\n\nexport function getTypenameFromStoreObject(\n store: NormalizedCache,\n objectOrReference: StoreObject | Reference,\n): string | undefined {\n return isReference(objectOrReference)\n ? store.get(objectOrReference.__ref, \"__typename\") as string\n : objectOrReference && objectOrReference.__typename;\n}\n\nexport const TypeOrFieldNameRegExp = /^[_a-z][_0-9a-z]*/i;\n\nexport function fieldNameFromStoreName(storeFieldName: string): string {\n const match = storeFieldName.match(TypeOrFieldNameRegExp);\n return match ? match[0] : storeFieldName;\n}\n\nexport function selectionSetMatchesResult(\n selectionSet: SelectionSetNode,\n result: Record,\n variables?: Record,\n): boolean {\n if (isNonNullObject(result)) {\n return isArray(result)\n ? result.every(item => selectionSetMatchesResult(selectionSet, item, variables))\n : selectionSet.selections.every(field => {\n if (isField(field) && shouldInclude(field, variables)) {\n const key = resultKeyNameFromField(field);\n return hasOwn.call(result, key) &&\n (!field.selectionSet ||\n selectionSetMatchesResult(field.selectionSet, result[key], variables));\n }\n // If the selection has been skipped with @skip(true) or\n // @include(false), it should not count against the matching. If\n // the selection is not a field, it must be a fragment (inline or\n // named). 
We will determine if selectionSetMatchesResult for that\n // fragment when we get to it, so for now we return true.\n return true;\n });\n }\n return false;\n}\n\nexport function storeValueIsStoreObject(\n value: StoreValue,\n): value is StoreObject {\n return isNonNullObject(value) &&\n !isReference(value) &&\n !isArray(value);\n}\n\nexport function makeProcessedFieldsMerger() {\n return new DeepMerger;\n}\n\nexport function extractFragmentContext(\n document: DocumentNode,\n fragments?: FragmentRegistryAPI,\n): {\n fragmentMap: FragmentMap;\n lookupFragment: FragmentMapFunction;\n} {\n // FragmentMap consisting only of fragments defined directly in document, not\n // including other fragments registered in the FragmentRegistry.\n const fragmentMap = createFragmentMap(getFragmentDefinitions(document));\n return {\n fragmentMap,\n lookupFragment(name) {\n let def: FragmentDefinitionNode | null = fragmentMap[name];\n if (!def && fragments) {\n def = fragments.lookup(name);\n }\n return def || null;\n },\n };\n}\n","import { invariant } from '../../utilities/globals';\n\nimport {\n DocumentNode,\n DefinitionNode,\n VariableDefinitionNode,\n OperationDefinitionNode\n} from 'graphql';\n\nexport enum DocumentType {\n Query,\n Mutation,\n Subscription\n}\n\nexport interface IDocumentDefinition {\n type: DocumentType;\n name: string;\n variables: ReadonlyArray;\n}\n\nconst cache = new Map();\n\nexport function operationName(type: DocumentType) {\n let name;\n switch (type) {\n case DocumentType.Query:\n name = 'Query';\n break;\n case DocumentType.Mutation:\n name = 'Mutation';\n break;\n case DocumentType.Subscription:\n name = 'Subscription';\n break;\n }\n return name;\n}\n\n// This parser is mostly used to safety check incoming documents.\nexport function parser(document: DocumentNode): IDocumentDefinition {\n const cached = cache.get(document);\n if (cached) return cached;\n\n let variables, type, name;\n\n invariant(\n !!document && !!document.kind,\n `Argument of ${document} passed to parser was not a valid GraphQL ` +\n `DocumentNode. You may need to use 'graphql-tag' or another method ` +\n `to convert your operation into a document`\n );\n\n const fragments: DefinitionNode[] = []\n const queries: DefinitionNode[] = []\n const mutations: DefinitionNode[] = []\n const subscriptions: DefinitionNode[] = []\n\n for (const x of document.definitions) {\n if (x.kind === 'FragmentDefinition') {\n fragments.push(x);\n continue\n }\n\n if (x.kind === 'OperationDefinition') {\n switch (x.operation) {\n case 'query':\n queries.push(x);\n break;\n case 'mutation':\n mutations.push(x);\n break;\n case 'subscription':\n subscriptions.push(x);\n break;\n }\n }\n }\n\n invariant(\n !fragments.length ||\n (queries.length || mutations.length || subscriptions.length),\n `Passing only a fragment to 'graphql' is not yet supported. ` +\n `You must include a query, subscription or mutation as well`\n );\n\n invariant(\n queries.length + mutations.length + subscriptions.length <= 1,\n `react-apollo only supports a query, subscription, or a mutation per HOC. ` +\n `${document} had ${queries.length} queries, ${subscriptions.length} ` +\n `subscriptions and ${mutations.length} mutations. ` +\n `You can use 'compose' to join multiple operation types to a component`\n );\n\n type = queries.length ? DocumentType.Query : DocumentType.Mutation;\n if (!queries.length && !mutations.length) type = DocumentType.Subscription;\n\n const definitions = queries.length\n ? queries\n : mutations.length\n ? 
mutations\n : subscriptions;\n\n invariant(\n definitions.length === 1,\n `react-apollo only supports one definition per HOC. ${document} had ` +\n `${definitions.length} definitions. ` +\n `You can use 'compose' to join multiple operation types to a component`\n );\n\n const definition = definitions[0] as OperationDefinitionNode;\n variables = definition.variableDefinitions || [];\n\n if (definition.name && definition.name.kind === 'Name') {\n name = definition.name.value;\n } else {\n name = 'data'; // fallback to using data if no name\n }\n\n const payload = { name, type, variables };\n cache.set(document, payload);\n return payload;\n}\n\nexport function verifyDocumentType(document: DocumentNode, type: DocumentType) {\n const operation = parser(document);\n const requiredOperationName = operationName(type);\n const usedOperationName = operationName(operation.type);\n invariant(\n operation.type === type,\n `Running a ${requiredOperationName} requires a graphql ` +\n `${requiredOperationName}, but a ${usedOperationName} was used instead.`\n );\n}\n\n","import { InvariantError, invariant } from '../../utilities/globals';\n\nimport { Observable, Observer } from '../../utilities';\nimport {\n NextLink,\n Operation,\n RequestHandler,\n FetchResult,\n GraphQLRequest\n} from './types';\nimport {\n validateOperation,\n createOperation,\n transformOperation,\n} from '../utils';\n\nfunction passthrough(op: Operation, forward: NextLink) {\n return (forward ? forward(op) : Observable.of()) as Observable;\n}\n\nfunction toLink(handler: RequestHandler | ApolloLink) {\n return typeof handler === 'function' ? new ApolloLink(handler) : handler;\n}\n\nfunction isTerminating(link: ApolloLink): boolean {\n return link.request.length <= 1;\n}\n\nclass LinkError extends Error {\n public link?: ApolloLink;\n constructor(message?: string, link?: ApolloLink) {\n super(message);\n this.link = link;\n }\n}\n\nexport class ApolloLink {\n public static empty(): ApolloLink {\n return new ApolloLink(() => Observable.of());\n }\n\n public static from(links: (ApolloLink | RequestHandler)[]): ApolloLink {\n if (links.length === 0) return ApolloLink.empty();\n return links.map(toLink).reduce((x, y) => x.concat(y)) as ApolloLink;\n }\n\n public static split(\n test: (op: Operation) => boolean,\n left: ApolloLink | RequestHandler,\n right?: ApolloLink | RequestHandler,\n ): ApolloLink {\n const leftLink = toLink(left);\n const rightLink = toLink(right || new ApolloLink(passthrough));\n\n if (isTerminating(leftLink) && isTerminating(rightLink)) {\n return new ApolloLink(operation => {\n return test(operation)\n ? leftLink.request(operation) || Observable.of()\n : rightLink.request(operation) || Observable.of();\n });\n } else {\n return new ApolloLink((operation, forward) => {\n return test(operation)\n ? 
leftLink.request(operation, forward) || Observable.of()\n : rightLink.request(operation, forward) || Observable.of();\n });\n }\n }\n\n public static execute(\n link: ApolloLink,\n operation: GraphQLRequest,\n ): Observable {\n return (\n link.request(\n createOperation(\n operation.context,\n transformOperation(validateOperation(operation)),\n ),\n ) || Observable.of()\n );\n }\n\n public static concat(\n first: ApolloLink | RequestHandler,\n second: ApolloLink | RequestHandler,\n ) {\n const firstLink = toLink(first);\n if (isTerminating(firstLink)) {\n invariant.warn(\n new LinkError(\n `You are calling concat on a terminating link, which will have no effect`,\n firstLink,\n ),\n );\n return firstLink;\n }\n const nextLink = toLink(second);\n\n if (isTerminating(nextLink)) {\n return new ApolloLink(\n operation =>\n firstLink.request(\n operation,\n op => nextLink.request(op) || Observable.of(),\n ) || Observable.of(),\n );\n } else {\n return new ApolloLink((operation, forward) => {\n return (\n firstLink.request(operation, op => {\n return nextLink.request(op, forward) || Observable.of();\n }) || Observable.of()\n );\n });\n }\n }\n\n constructor(request?: RequestHandler) {\n if (request) this.request = request;\n }\n\n public split(\n test: (op: Operation) => boolean,\n left: ApolloLink | RequestHandler,\n right?: ApolloLink | RequestHandler,\n ): ApolloLink {\n return this.concat(\n ApolloLink.split(test, left, right || new ApolloLink(passthrough))\n );\n }\n\n public concat(next: ApolloLink | RequestHandler): ApolloLink {\n return ApolloLink.concat(this, next);\n }\n\n public request(\n operation: Operation,\n forward?: NextLink,\n ): Observable | null {\n throw new InvariantError('request is not implemented');\n }\n\n protected onError(\n error: any,\n observer?: Observer,\n ): false | void {\n if (observer && observer.error) {\n observer.error(error);\n // Returning false indicates that observer.error does not need to be\n // called again, since it was already called (on the previous line).\n // Calling observer.error again would not cause any real problems,\n // since only the first call matters, but custom onError functions\n // might have other reasons for wanting to prevent the default\n // behavior by returning false.\n return false;\n }\n // Throw errors will be passed to observer.error.\n throw error;\n }\n\n public setOnError(fn: ApolloLink[\"onError\"]): this {\n this.onError = fn;\n return this;\n }\n}\n","import { GraphQLRequest, Operation } from '../core';\n\nexport function createOperation(\n starting: any,\n operation: GraphQLRequest,\n): Operation {\n let context = { ...starting };\n const setContext = (next: any) => {\n if (typeof next === 'function') {\n context = { ...context, ...next(context) };\n } else {\n context = { ...context, ...next };\n }\n };\n const getContext = () => ({ ...context });\n\n Object.defineProperty(operation, 'setContext', {\n enumerable: false,\n value: setContext,\n });\n\n Object.defineProperty(operation, 'getContext', {\n enumerable: false,\n value: getContext,\n });\n\n return operation as Operation;\n}\n","import { GraphQLRequest, Operation } from '../core';\nimport { getOperationName } from '../../utilities';\n\nexport function transformOperation(operation: GraphQLRequest): GraphQLRequest {\n const transformedOperation: GraphQLRequest = {\n variables: operation.variables || {},\n extensions: operation.extensions || {},\n operationName: operation.operationName,\n query: operation.query,\n };\n\n // Best guess at an operation name\n 
if (!transformedOperation.operationName) {\n transformedOperation.operationName =\n typeof transformedOperation.query !== 'string'\n ? getOperationName(transformedOperation.query) || undefined\n : '';\n }\n\n return transformedOperation as Operation;\n}\n","import { InvariantError } from '../../utilities/globals'\nimport { GraphQLRequest } from '../core';\n\nexport function validateOperation(operation: GraphQLRequest): GraphQLRequest {\n const OPERATION_FIELDS = [\n 'query',\n 'operationName',\n 'variables',\n 'extensions',\n 'context',\n ];\n for (let key of Object.keys(operation)) {\n if (OPERATION_FIELDS.indexOf(key) < 0) {\n throw new InvariantError(`illegal argument: ${key}`);\n }\n }\n\n return operation;\n}\n","import { invariant } from '../../utilities/globals';\nimport { useContext } from 'react';\nimport { ApolloClient } from '../../core';\nimport { getApolloContext } from '../context';\n\nexport function useApolloClient(\n override?: ApolloClient,\n): ApolloClient {\n const context = useContext(getApolloContext());\n const client = override || context.client;\n invariant(\n !!client,\n 'Could not find \"client\" in the context or passed in as an option. ' +\n 'Wrap the root component in an , or pass an ApolloClient ' +\n 'instance in via options.',\n );\n\n return client;\n}\n","import { maybe } from \"../globals\";\n\nexport const canUseWeakMap =\n typeof WeakMap === 'function' &&\n maybe(() => navigator.product) !== 'ReactNative';\n\nexport const canUseWeakSet = typeof WeakSet === 'function';\n\nexport const canUseSymbol =\n typeof Symbol === 'function' &&\n typeof Symbol.for === 'function';\n\nexport const canUseAsyncIteratorSymbol = canUseSymbol && Symbol.asyncIterator;\n\nexport const canUseDOM =\n typeof maybe(() => window.document.createElement) === \"function\";\n\nconst usingJSDOM: boolean =\n // Following advice found in this comment from @domenic (maintainer of jsdom):\n // https://github.com/jsdom/jsdom/issues/1537#issuecomment-229405327\n //\n // Since we control the version of Jest and jsdom used when running Apollo\n // Client tests, and that version is recent enought to include \" jsdom/x.y.z\"\n // at the end of the user agent string, I believe this case is all we need to\n // check. Testing for \"Node.js\" was recommended for backwards compatibility\n // with older version of jsdom, but we don't have that problem.\n maybe(() => navigator.userAgent.indexOf(\"jsdom\") >= 0) || false;\n\n// Our tests should all continue to pass if we remove this !usingJSDOM\n// condition, thereby allowing useLayoutEffect when using jsdom. Unfortunately,\n// if we allow useLayoutEffect, then useSyncExternalStore generates many\n// warnings about useLayoutEffect doing nothing on the server. While these\n// warnings are harmless, this !usingJSDOM condition seems to be the best way to\n// prevent them (i.e. skipping useLayoutEffect when using jsdom).\nexport const canUseLayoutEffect = canUseDOM && !usingJSDOM;\n","import { invariant, InvariantError } from '../globals';\n\nimport {\n DocumentNode,\n OperationDefinitionNode,\n FragmentDefinitionNode,\n ValueNode,\n} from 'graphql';\n\nimport { valueToObjectRepresentation } from './storeUtils';\n\n// Checks the document for errors and throws an exception if there is an error.\nexport function checkDocument(doc: DocumentNode) {\n invariant(\n doc && doc.kind === 'Document',\n `Expecting a parsed GraphQL document. Perhaps you need to wrap the query \\\nstring in a \"gql\" tag? 
http://docs.apollostack.com/apollo-client/core.html#gql`,\n );\n\n const operations = doc.definitions\n .filter(d => d.kind !== 'FragmentDefinition')\n .map(definition => {\n if (definition.kind !== 'OperationDefinition') {\n throw new InvariantError(\n `Schema type definitions not allowed in queries. Found: \"${\n definition.kind\n }\"`,\n );\n }\n return definition;\n });\n\n invariant(\n operations.length <= 1,\n `Ambiguous GraphQL document: contains ${operations.length} operations`,\n );\n\n return doc;\n}\n\nexport function getOperationDefinition(\n doc: DocumentNode,\n): OperationDefinitionNode | undefined {\n checkDocument(doc);\n return doc.definitions.filter(\n definition => definition.kind === 'OperationDefinition',\n )[0] as OperationDefinitionNode;\n}\n\nexport function getOperationName(doc: DocumentNode): string | null {\n return (\n doc.definitions\n .filter(\n definition =>\n definition.kind === 'OperationDefinition' && definition.name,\n )\n .map((x: OperationDefinitionNode) => x!.name!.value)[0] || null\n );\n}\n\n// Returns the FragmentDefinitions from a particular document as an array\nexport function getFragmentDefinitions(\n doc: DocumentNode,\n): FragmentDefinitionNode[] {\n return doc.definitions.filter(\n definition => definition.kind === 'FragmentDefinition',\n ) as FragmentDefinitionNode[];\n}\n\nexport function getQueryDefinition(doc: DocumentNode): OperationDefinitionNode {\n const queryDef = getOperationDefinition(doc) as OperationDefinitionNode;\n\n invariant(\n queryDef && queryDef.operation === 'query',\n 'Must contain a query definition.',\n );\n\n return queryDef;\n}\n\nexport function getFragmentDefinition(\n doc: DocumentNode,\n): FragmentDefinitionNode {\n invariant(\n doc.kind === 'Document',\n `Expecting a parsed GraphQL document. Perhaps you need to wrap the query \\\nstring in a \"gql\" tag? 
http://docs.apollostack.com/apollo-client/core.html#gql`,\n );\n\n invariant(\n doc.definitions.length <= 1,\n 'Fragment must have exactly one definition.',\n );\n\n const fragmentDef = doc.definitions[0] as FragmentDefinitionNode;\n\n invariant(\n fragmentDef.kind === 'FragmentDefinition',\n 'Must be a fragment definition.',\n );\n\n return fragmentDef as FragmentDefinitionNode;\n}\n\n/**\n * Returns the first operation definition found in this document.\n * If no operation definition is found, the first fragment definition will be returned.\n * If no definitions are found, an error will be thrown.\n */\nexport function getMainDefinition(\n queryDoc: DocumentNode,\n): OperationDefinitionNode | FragmentDefinitionNode {\n checkDocument(queryDoc);\n\n let fragmentDefinition;\n\n for (let definition of queryDoc.definitions) {\n if (definition.kind === 'OperationDefinition') {\n const operation = (definition as OperationDefinitionNode).operation;\n if (\n operation === 'query' ||\n operation === 'mutation' ||\n operation === 'subscription'\n ) {\n return definition as OperationDefinitionNode;\n }\n }\n if (definition.kind === 'FragmentDefinition' && !fragmentDefinition) {\n // we do this because we want to allow multiple fragment definitions\n // to precede an operation definition.\n fragmentDefinition = definition as FragmentDefinitionNode;\n }\n }\n\n if (fragmentDefinition) {\n return fragmentDefinition;\n }\n\n throw new InvariantError(\n 'Expected a parsed GraphQL query with a query, mutation, subscription, or a fragment.',\n );\n}\n\nexport function getDefaultValues(\n definition: OperationDefinitionNode | undefined,\n): Record {\n const defaultValues = Object.create(null);\n const defs = definition && definition.variableDefinitions;\n if (defs && defs.length) {\n defs.forEach(def => {\n if (def.defaultValue) {\n valueToObjectRepresentation(\n defaultValues,\n def.variable.name,\n def.defaultValue as ValueNode,\n );\n }\n });\n }\n return defaultValues;\n}\n","import { invariant } from '../globals';\n\n// Provides the methods that allow QueryManager to handle the `skip` and\n// `include` directives within GraphQL.\nimport {\n SelectionNode,\n VariableNode,\n BooleanValueNode,\n DirectiveNode,\n DocumentNode,\n ArgumentNode,\n ValueNode,\n ASTNode,\n visit,\n BREAK,\n} from 'graphql';\n\nexport type DirectiveInfo = {\n [fieldName: string]: { [argName: string]: any };\n};\n\nexport function shouldInclude(\n { directives }: SelectionNode,\n variables?: Record,\n): boolean {\n if (!directives || !directives.length) {\n return true;\n }\n return getInclusionDirectives(\n directives\n ).every(({ directive, ifArgument }) => {\n let evaledValue: boolean = false;\n if (ifArgument.value.kind === 'Variable') {\n evaledValue = variables && variables[(ifArgument.value as VariableNode).name.value];\n invariant(\n evaledValue !== void 0,\n `Invalid variable referenced in @${directive.name.value} directive.`,\n );\n } else {\n evaledValue = (ifArgument.value as BooleanValueNode).value;\n }\n return directive.name.value === 'skip' ? 
!evaledValue : evaledValue;\n });\n}\n\nexport function getDirectiveNames(root: ASTNode) {\n const names: string[] = [];\n\n visit(root, {\n Directive(node: DirectiveNode) {\n names.push(node.name.value);\n },\n });\n\n return names;\n}\n\nexport const hasAnyDirectives = (\n names: string[],\n root: ASTNode,\n) => hasDirectives(names, root, false);\n\nexport const hasAllDirectives = (\n names: string[],\n root: ASTNode,\n) => hasDirectives(names, root, true);\n\nexport function hasDirectives(\n names: string[],\n root: ASTNode,\n all?: boolean,\n) {\n const nameSet = new Set(names);\n const uniqueCount = nameSet.size;\n\n visit(root, {\n Directive(node) {\n if (\n nameSet.delete(node.name.value) &&\n (!all || !nameSet.size)\n ) {\n return BREAK;\n }\n },\n });\n\n // If we found all the names, nameSet will be empty. If we only care about\n // finding some of them, the < condition is sufficient.\n return all ? !nameSet.size : nameSet.size < uniqueCount;\n}\n\nexport function hasClientExports(document: DocumentNode) {\n return document && hasDirectives(['client', 'export'], document, true);\n}\n\nexport type InclusionDirectives = Array<{\n directive: DirectiveNode;\n ifArgument: ArgumentNode;\n}>;\n\nfunction isInclusionDirective({ name: { value } }: DirectiveNode): boolean {\n return value === 'skip' || value === 'include';\n}\n\nexport function getInclusionDirectives(\n directives: ReadonlyArray,\n): InclusionDirectives {\n const result: InclusionDirectives = [];\n\n if (directives && directives.length) {\n directives.forEach(directive => {\n if (!isInclusionDirective(directive)) return;\n\n const directiveArguments = directive.arguments;\n const directiveName = directive.name.value;\n\n invariant(\n directiveArguments && directiveArguments.length === 1,\n `Incorrect number of arguments for the @${directiveName} directive.`,\n );\n\n const ifArgument = directiveArguments![0];\n invariant(\n ifArgument.name && ifArgument.name.value === 'if',\n `Invalid argument for the @${directiveName} directive.`,\n );\n\n const ifValue: ValueNode = ifArgument.value;\n\n // means it has to be a variable value if this is a valid @skip or @include directive\n invariant(\n ifValue &&\n (ifValue.kind === 'Variable' || ifValue.kind === 'BooleanValue'),\n `Argument for the @${directiveName} directive must be a variable or a boolean value.`,\n );\n\n result.push({ directive, ifArgument });\n });\n }\n\n return result;\n}\n\n","import { isNonNullObject } from \"./objects\";\n\nconst { hasOwnProperty } = Object.prototype;\n\n// These mergeDeep and mergeDeepArray utilities merge any number of objects\n// together, sharing as much memory as possible with the source objects, while\n// remaining careful to avoid modifying any source objects.\n\n// Logically, the return type of mergeDeep should be the intersection of\n// all the argument types. The binary call signature is by far the most\n// common, but we support 0- through 5-ary as well. After that, the\n// resulting type is just the inferred array element type. Note to nerds:\n// there is a more clever way of doing this that converts the tuple type\n// first to a union type (easy enough: T[number]) and then converts the\n// union to an intersection type using distributive conditional type\n// inference, but that approach has several fatal flaws (boolean becomes\n// true & false, and the inferred type ends up as unknown in many cases),\n// in addition to being nearly impossible to explain/understand.\nexport type TupleToIntersection =\n T extends [infer A] ? 
A :\n T extends [infer A, infer B] ? A & B :\n T extends [infer A, infer B, infer C] ? A & B & C :\n T extends [infer A, infer B, infer C, infer D] ? A & B & C & D :\n T extends [infer A, infer B, infer C, infer D, infer E] ? A & B & C & D & E :\n T extends (infer U)[] ? U : any;\n\nexport function mergeDeep(\n ...sources: T\n): TupleToIntersection {\n return mergeDeepArray(sources);\n}\n\n// In almost any situation where you could succeed in getting the\n// TypeScript compiler to infer a tuple type for the sources array, you\n// could just use mergeDeep instead of mergeDeepArray, so instead of\n// trying to convert T[] to an intersection type we just infer the array\n// element type, which works perfectly when the sources array has a\n// consistent element type.\nexport function mergeDeepArray(sources: T[]): T {\n let target = sources[0] || ({} as T);\n const count = sources.length;\n if (count > 1) {\n const merger = new DeepMerger();\n for (let i = 1; i < count; ++i) {\n target = merger.merge(target, sources[i]);\n }\n }\n return target;\n}\n\nexport type ReconcilerFunction = (\n this: DeepMerger,\n target: Record,\n source: Record,\n property: string | number,\n ...context: TContextArgs\n) => any;\n\nconst defaultReconciler: ReconcilerFunction =\n function (target, source, property) {\n return this.merge(target[property], source[property]);\n };\n\nexport class DeepMerger {\n constructor(\n private reconciler: ReconcilerFunction = defaultReconciler,\n ) {}\n\n public merge(target: any, source: any, ...context: TContextArgs): any {\n if (isNonNullObject(source) && isNonNullObject(target)) {\n Object.keys(source).forEach(sourceKey => {\n if (hasOwnProperty.call(target, sourceKey)) {\n const targetValue = target[sourceKey];\n if (source[sourceKey] !== targetValue) {\n const result = this.reconciler(target, source, sourceKey, ...context);\n // A well-implemented reconciler may return targetValue to indicate\n // the merge changed nothing about the structure of the target.\n if (result !== targetValue) {\n target = this.shallowCopyForMerge(target);\n target[sourceKey] = result;\n }\n }\n } else {\n // If there is no collision, the target can safely share memory with\n // the source, and the recursion can terminate here.\n target = this.shallowCopyForMerge(target);\n target[sourceKey] = source[sourceKey];\n }\n });\n\n return target;\n }\n\n // If source (or target) is not an object, let source replace target.\n return source;\n }\n\n public isObject = isNonNullObject;\n\n private pastCopies = new Set();\n\n public shallowCopyForMerge(value: T): T {\n if (isNonNullObject(value)) {\n if (!this.pastCopies.has(value)) {\n if (Array.isArray(value)) {\n value = (value as any).slice(0);\n } else {\n value = {\n __proto__: Object.getPrototypeOf(value),\n ...value,\n };\n }\n this.pastCopies.add(value);\n }\n }\n return value;\n }\n}\n"],"sourceRoot":""}