Project files

2023-11-09 18:47:11 +01:00
parent 695abe054b
commit c415135aae
8554 changed files with 858111 additions and 0 deletions

receipeServer/frontend_old/node_modules/.bin/acorn generated vendored Symbolic link

@@ -0,0 +1 @@
../acorn/bin/acorn

receipeServer/frontend_old/node_modules/.bin/ansi-html generated vendored Symbolic link

@@ -0,0 +1 @@
../ansi-html-community/bin/ansi-html

@@ -0,0 +1 @@
../autoprefixer/bin/autoprefixer

@@ -0,0 +1 @@
../browserslist/cli.js

@@ -0,0 +1 @@
../css-blank-pseudo/dist/cli.cjs

@@ -0,0 +1 @@
../css-has-pseudo/dist/cli.cjs

@@ -0,0 +1 @@
../css-prefers-color-scheme/dist/cli.cjs

receipeServer/frontend_old/node_modules/.bin/cssesc generated vendored Symbolic link

@@ -0,0 +1 @@
../cssesc/bin/cssesc

receipeServer/frontend_old/node_modules/.bin/envinfo generated vendored Symbolic link

@@ -0,0 +1 @@
../envinfo/dist/cli.js

receipeServer/frontend_old/node_modules/.bin/he generated vendored Symbolic link

@@ -0,0 +1 @@
../he/bin/he

@@ -0,0 +1 @@
../html-minifier-terser/cli.js

@@ -0,0 +1 @@
../import-local/fixtures/cli.js

receipeServer/frontend_old/node_modules/.bin/is-docker generated vendored Symbolic link

@@ -0,0 +1 @@
../is-docker/cli.js

receipeServer/frontend_old/node_modules/.bin/jsesc generated vendored Symbolic link

@@ -0,0 +1 @@
../jsesc/bin/jsesc

receipeServer/frontend_old/node_modules/.bin/json5 generated vendored Symbolic link

@@ -0,0 +1 @@
../json5/lib/cli.js

@@ -0,0 +1 @@
../loose-envify/cli.js

receipeServer/frontend_old/node_modules/.bin/mime generated vendored Symbolic link

@@ -0,0 +1 @@
../mime/cli.js

receipeServer/frontend_old/node_modules/.bin/mkdirp generated vendored Symbolic link

@@ -0,0 +1 @@
../mkdirp/bin/cmd.js

@@ -0,0 +1 @@
../multicast-dns/cli.js

receipeServer/frontend_old/node_modules/.bin/nanoid generated vendored Symbolic link

@@ -0,0 +1 @@
../nanoid/bin/nanoid.cjs

receipeServer/frontend_old/node_modules/.bin/node-which generated vendored Symbolic link

@@ -0,0 +1 @@
../which/bin/node-which

receipeServer/frontend_old/node_modules/.bin/parser generated vendored Symbolic link

@@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js

@@ -0,0 +1 @@
../regjsparser/bin/parser

receipeServer/frontend_old/node_modules/.bin/resolve generated vendored Symbolic link

@@ -0,0 +1 @@
../resolve/bin/resolve

receipeServer/frontend_old/node_modules/.bin/rimraf generated vendored Symbolic link

@@ -0,0 +1 @@
../rimraf/bin.js

receipeServer/frontend_old/node_modules/.bin/semver generated vendored Symbolic link

@@ -0,0 +1 @@
../semver/bin/semver.js

receipeServer/frontend_old/node_modules/.bin/terser generated vendored Symbolic link

@@ -0,0 +1 @@
../terser/bin/terser

receipeServer/frontend_old/node_modules/.bin/uuid generated vendored Symbolic link

@@ -0,0 +1 @@
../uuid/dist/bin/uuid

receipeServer/frontend_old/node_modules/.bin/webpack generated vendored Symbolic link

@@ -0,0 +1 @@
../webpack/bin/webpack.js

@@ -0,0 +1 @@
../webpack-cli/bin/cli.js

@@ -0,0 +1 @@
../webpack-dev-server/bin/webpack-dev-server.js

File diff suppressed because it is too large

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2019 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -0,0 +1,218 @@
# @ampproject/remapping
> Remap sequential sourcemaps through transformations to point at the original source code
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).
## Installation
```sh
npm install @ampproject/remapping
```
## Usage
```typescript
function remapping(
map: SourceMap | SourceMap[],
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
}
```
`remapping` takes the final output sourcemap and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as original, untransformed source code.
```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
file: 'transformed.js',
// 1st column of 2nd line of output file translates into the 1st source
// file, line 3, column 2
mappings: ';CAEE',
sources: ['helloworld.js'],
version: 3,
});
// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
file: 'transformed.min.js',
// 0th column of 1st line of output file translates into the 1st source
// file, line 2, column 1.
mappings: 'AACC',
names: [],
sources: ['transformed.js'],
version: 3,
});
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
// The "transformed.js" file is a transformed file.
if (file === 'transformed.js') {
// The root importer is empty.
console.assert(ctx.importer === '');
// The depth in the sourcemap tree we're currently loading.
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
console.assert(ctx.depth === 1);
return transformedMap;
}
// Loader will be called to load transformedMap's source file pointers as well.
console.assert(file === 'helloworld.js');
// `transformed.js`'s sourcemap points into `helloworld.js`.
console.assert(ctx.importer === 'transformed.js');
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
console.assert(ctx.depth === 2);
return null;
}
);
console.log(remapped);
// {
// file: 'transformed.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
In this example, `loader` will be called twice:
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
we return `null`.
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "the 0th column of the 1st line of the output file points to
the 1st column of the 2nd line of the file `helloworld.js`".
### Multiple transformations of a file
As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files, with the most recent transformation's sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:
```js
const remapped = remapping(
[minifiedTransformedMap, transformedMap],
() => null
);
console.log(remapped);
// {
// file: 'transformed.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
### Advanced control of the loading graph
#### `source`
The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
// source files are loaded, they will now be relative to `src/`.
ctx.source = 'src/transformed.js';
return transformedMap;
}
console.assert(file === 'src/helloworld.js');
// We could further change the source of this original file, eg, to be inside a nested directory
// itself. This will be reflected in the remapped sourcemap.
ctx.source = 'src/nested/helloworld.js';
return null;
}
);
console.log(remapped);
// {
// …,
// sources: ['src/nested/helloworld.js'],
// };
```
#### `content`
The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
// would not include any `sourcesContent` values.
return transformedMap;
}
console.assert(file === 'helloworld.js');
// We can read the file to provide the source content.
ctx.content = fs.readFileSync(file, 'utf8');
return null;
}
);
console.log(remapped);
// {
// …,
// sourcesContent: [
// 'console.log("Hello world!")',
// ],
// };
```
### Options
#### excludeContent
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.
#### decodedMappings
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.
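Both options go on the optional third argument shown in the Usage signature above. A minimal sketch, reusing `minifiedTransformedMap` and a no-op loader from the earlier examples:

```js
const remapped = remapping(
  minifiedTransformedMap,
  () => null,
  // Drop `sourcesContent` from the output and keep `mappings` as decoded
  // arrays instead of an encoded VLQ string.
  { excludeContent: true, decodedMappings: true }
);
```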

@@ -0,0 +1,62 @@
{
"name": "@ampproject/remapping",
"version": "2.1.2",
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
"keywords": [
"source",
"map",
"remap"
],
"main": "dist/remapping.umd.js",
"module": "dist/remapping.mjs",
"typings": "dist/types/remapping.d.ts",
"files": [
"dist"
],
"author": "Justin Ridgewell <jridgewell@google.com>",
"repository": {
"type": "git",
"url": "git+https://github.com/ampproject/remapping.git"
},
"license": "Apache-2.0",
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"test": "run-s -n test:lint test:only",
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "jest --coverage",
"test:watch": "jest --coverage --watch"
},
"devDependencies": {
"@rollup/plugin-typescript": "8.3.0",
"@types/jest": "27.4.0",
"@typescript-eslint/eslint-plugin": "5.10.2",
"@typescript-eslint/parser": "5.10.2",
"eslint": "8.8.0",
"eslint-config-prettier": "8.3.0",
"jest": "27.4.7",
"jest-config": "27.4.7",
"npm-run-all": "4.1.5",
"prettier": "2.5.1",
"rollup": "2.67.0",
"ts-jest": "27.1.3",
"tslib": "2.3.1",
"typescript": "4.5.5"
},
"dependencies": {
"@jridgewell/trace-mapping": "^0.3.0"
}
}

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2019 Meteor Development Group, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,21 @@
# <a href="https://www.apollographql.com/"><img src="https://user-images.githubusercontent.com/841294/53402609-b97a2180-39ba-11e9-8100-812bab86357c.png" height="100" alt="React Apollo"></a>
## React Apollo - Hooks
[![npm version](https://badge.fury.io/js/%40apollo%2Freact-hooks.svg)](https://badge.fury.io/js/%40apollo%2Freact-hooks)
[![Build Status](https://circleci.com/gh/apollographql/react-apollo.svg?style=svg)](https://circleci.com/gh/apollographql/react-apollo)
[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/apollo)
React Apollo `useQuery`, `useLazyQuery`, `useMutation`, `useSubscription` and `useApolloClient` hooks.
### Installation
```
npm install @apollo/react-hooks
```
### Documentation
All Apollo Client documentation, including React Apollo usage articles and helpful recipes, lives on [https://www.apollographql.com/docs/react/](https://www.apollographql.com/docs/react/)
For the React Apollo API reference, visit [https://www.apollographql.com/docs/react/api/react-apollo.html](https://www.apollographql.com/docs/react/api/react-apollo.html)
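A hedged sketch of the hook API; the `Greeting` query is made up, and `graphql-tag` is assumed to be installed for `gql`:

```js
import { useQuery } from '@apollo/react-hooks';
import gql from 'graphql-tag';

// Hypothetical query; substitute a field your GraphQL server actually exposes.
const GREETING_QUERY = gql`
  query Greeting {
    greeting
  }
`;

function Greeting() {
  const { loading, error, data } = useQuery(GREETING_QUERY);
  if (loading) return 'Loading…';
  if (error) return `Error: ${error.message}`;
  return data.greeting; // React 16+ can render plain strings.
}
```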

@@ -0,0 +1 @@
export * from '@apollo/client';

@@ -0,0 +1 @@
export * from '@apollo/client';

@@ -0,0 +1 @@
module.exports = require('@apollo/client');

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2021 Apollo Graph, Inc. (Formerly Meteor Development Group, Inc.)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,40 @@
# <a href="https://www.apollographql.com/"><img src="https://user-images.githubusercontent.com/841294/53402609-b97a2180-39ba-11e9-8100-812bab86357c.png" height="100" alt="Apollo Client"></a>
## Apollo Client
[![npm version](https://badge.fury.io/js/%40apollo%2Fclient.svg)](https://badge.fury.io/js/%40apollo%2Fclient)
[![Build Status](https://circleci.com/gh/apollographql/apollo-client.svg?style=svg)](https://circleci.com/gh/apollographql/apollo-client)
[![Join the community](https://img.shields.io/discourse/status?label=Join%20the%20community&server=https%3A%2F%2Fcommunity.apollographql.com)](https://community.apollographql.com)
Apollo Client is a fully-featured caching GraphQL client with integrations for React, Angular, and more. It allows you to easily build UI components that fetch data via GraphQL.
## Documentation
All Apollo Client documentation, including React integration articles and helpful recipes, can be found at: <br/>
[https://www.apollographql.com/docs/react/](https://www.apollographql.com/docs/react/)
The Apollo Client API reference can be found at: <br/>
[https://www.apollographql.com/docs/react/api/apollo-client/](https://www.apollographql.com/docs/react/api/apollo-client/)
Learn how to use Apollo Client with self-paced hands-on training on Odyssey, Apollo's official learning platform: <br/>
[https://odyssey.apollographql.com/](https://odyssey.apollographql.com/)
## Maintainers
- [@benjamn](https://github.com/benjamn) (Apollo)
- [@brainkim](https://github.com/brainkim) (Apollo)
- [@jcreighton](https://github.com/jcreighton) (Netflix)
- [@hwillson](https://github.com/hwillson) (Apollo)
## Who is Apollo?
[Apollo](https://apollographql.com/) builds open-source software and a graph platform to unify GraphQL across your apps and services. We help you ship faster with:
* [Apollo Studio](https://www.apollographql.com/studio/develop/) A free, end-to-end platform for managing your GraphQL lifecycle. Track your GraphQL schemas in a hosted registry to create a source of truth for everything in your graph. Studio provides an IDE (Apollo Explorer) so you can explore data, collaborate on queries, observe usage, and safely make schema changes.
* [Apollo Federation](https://www.apollographql.com/apollo-federation) The industry-standard open architecture for building a distributed graph. Use Apollo's gateway to compose a unified graph from multiple subgraphs, determine a query plan, and route requests across your services.
* [Apollo Client](https://www.apollographql.com/apollo-client/) The most popular GraphQL client for the web. Apollo also builds and maintains [Apollo iOS](https://github.com/apollographql/apollo-ios) and [Apollo Android](https://github.com/apollographql/apollo-android).
* [Apollo Server](https://www.apollographql.com/docs/apollo-server/) A production-ready JavaScript GraphQL server that connects to any microservice, API, or database. Compatible with all popular JavaScript frameworks and deployable in serverless environments.
## Learn how to build with Apollo
Check out the [Odyssey](https://odyssey.apollographql.com/) learning platform, the perfect place to start your GraphQL journey with videos and interactive code challenges. Join the [Apollo Community](https://community.apollographql.com/) to interact with and get technical help from the GraphQL community.
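A minimal client setup and one-off query, as a sketch; the endpoint URL is a placeholder, not part of this package:

```js
import { ApolloClient, InMemoryCache, gql } from '@apollo/client';

const client = new ApolloClient({
  uri: 'https://example.com/graphql', // hypothetical GraphQL endpoint
  cache: new InMemoryCache(),
});

client
  .query({ query: gql`query Greeting { greeting }` })
  .then((result) => console.log(result.data));
```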

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

@@ -0,0 +1,32 @@
import { DocumentNode } from 'graphql';
import { StoreObject, Reference } from '../../utilities';
import { DataProxy } from './types/DataProxy';
import { Cache } from './types/Cache';
export declare type Transaction<T> = (c: ApolloCache<T>) => void;
export declare abstract class ApolloCache<TSerialized> implements DataProxy {
abstract read<TData = any, TVariables = any>(query: Cache.ReadOptions<TVariables, TData>): TData | null;
abstract write<TData = any, TVariables = any>(write: Cache.WriteOptions<TData, TVariables>): Reference | undefined;
abstract diff<T>(query: Cache.DiffOptions): Cache.DiffResult<T>;
abstract watch<TData = any, TVariables = any>(watch: Cache.WatchOptions<TData, TVariables>): () => void;
abstract reset(options?: Cache.ResetOptions): Promise<void>;
abstract evict(options: Cache.EvictOptions): boolean;
abstract restore(serializedState: TSerialized): ApolloCache<TSerialized>;
abstract extract(optimistic?: boolean): TSerialized;
abstract removeOptimistic(id: string): void;
batch<U>(options: Cache.BatchOptions<this, U>): U;
abstract performTransaction(transaction: Transaction<TSerialized>, optimisticId?: string | null): void;
recordOptimisticTransaction(transaction: Transaction<TSerialized>, optimisticId: string): void;
transformDocument(document: DocumentNode): DocumentNode;
identify(object: StoreObject | Reference): string | undefined;
gc(): string[];
modify(options: Cache.ModifyOptions): boolean;
transformForLink(document: DocumentNode): DocumentNode;
readQuery<QueryType, TVariables = any>(options: Cache.ReadQueryOptions<QueryType, TVariables>, optimistic?: boolean): QueryType | null;
private getFragmentDoc;
readFragment<FragmentType, TVariables = any>(options: Cache.ReadFragmentOptions<FragmentType, TVariables>, optimistic?: boolean): FragmentType | null;
writeQuery<TData = any, TVariables = any>({ id, data, ...options }: Cache.WriteQueryOptions<TData, TVariables>): Reference | undefined;
writeFragment<TData = any, TVariables = any>({ id, data, fragment, fragmentName, ...options }: Cache.WriteFragmentOptions<TData, TVariables>): Reference | undefined;
updateQuery<TData = any, TVariables = any>(options: Cache.UpdateQueryOptions<TData, TVariables>, update: (data: TData | null) => TData | null | void): TData | null;
updateFragment<TData = any, TVariables = any>(options: Cache.UpdateFragmentOptions<TData, TVariables>, update: (data: TData | null) => TData | null | void): TData | null;
}
//# sourceMappingURL=cache.d.ts.map
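A short sketch of the `readQuery`/`writeQuery` surface declared above, using the concrete `InMemoryCache` subclass; the `greeting` field is made up:

```js
import { InMemoryCache, gql } from '@apollo/client';

const cache = new InMemoryCache();
const GREETING = gql`query Greeting { greeting }`;

// writeQuery stores the result under ROOT_QUERY; readQuery reads it back
// (and returns null if the data is not fully present in the cache).
cache.writeQuery({ query: GREETING, data: { greeting: 'Hello world!' } });
console.log(cache.readQuery({ query: GREETING })); // { greeting: 'Hello world!' }
```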

@@ -0,0 +1 @@
{"version":3,"file":"cache.d.ts","sourceRoot":"","sources":["../../../src/cache/core/cache.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAGvC,OAAO,EACL,WAAW,EACX,SAAS,EAEV,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC,oBAAY,WAAW,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;AAEzD,8BAAsB,WAAW,CAAC,WAAW,CAAE,YAAW,SAAS;aAGjD,IAAI,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EAChD,KAAK,EAAE,KAAK,CAAC,WAAW,CAAC,UAAU,EAAE,KAAK,CAAC,GAC1C,KAAK,GAAG,IAAI;aACC,KAAK,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EACjD,KAAK,EAAE,KAAK,CAAC,YAAY,CAAC,KAAK,EAAE,UAAU,CAAC,GAC3C,SAAS,GAAG,SAAS;aACR,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,CAAC,WAAW,GAAG,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC;aACtD,KAAK,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EACjD,KAAK,EAAE,KAAK,CAAC,YAAY,CAAC,KAAK,EAAE,UAAU,CAAC,GAC3C,MAAM,IAAI;aAIG,KAAK,CAAC,OAAO,CAAC,EAAE,KAAK,CAAC,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;aAOlD,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,YAAY,GAAG,OAAO;aAU3C,OAAO,CACrB,eAAe,EAAE,WAAW,GAC3B,WAAW,CAAC,WAAW,CAAC;aAKX,OAAO,CAAC,UAAU,CAAC,EAAE,OAAO,GAAG,WAAW;aAI1C,gBAAgB,CAAC,EAAE,EAAE,MAAM,GAAG,IAAI;IAS3C,KAAK,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC;aAYxC,kBAAkB,CAChC,WAAW,EAAE,WAAW,CAAC,WAAW,CAAC,EAQrC,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,GAC3B,IAAI;IAEA,2BAA2B,CAChC,WAAW,EAAE,WAAW,CAAC,WAAW,CAAC,EACrC,YAAY,EAAE,MAAM;IAOf,iBAAiB,CAAC,QAAQ,EAAE,YAAY,GAAG,YAAY;IAIvD,QAAQ,CAAC,MAAM,EAAE,WAAW,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS;IAI7D,EAAE,IAAI,MAAM,EAAE;IAId,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,aAAa,GAAG,OAAO;IAM7C,gBAAgB,CAAC,QAAQ,EAAE,YAAY,GAAG,YAAY;IAUtD,SAAS,CAAC,SAAS,EAAE,UAAU,GAAG,GAAG,EAC1C,OAAO,EAAE,KAAK,CAAC,gBAAgB,CAAC,SAAS,EAAE,UAAU,CAAC,EACtD,UAAU,UAAuB,GAChC,SAAS,GAAG,IAAI;IAUnB,OAAO,CAAC,cAAc,CAAkC;IAEjD,YAAY,CAAC,YAAY,EAAE,UAAU,GAAG,GAAG,EAChD,OAAO,EAAE,KAAK,CAAC,mBAAmB,CAAC,YAAY,EAAE,UAAU,CAAC,EAC5D,UAAU,UAAuB,GAChC,YAAY,GAAG,IAAI;IASf,UAAU,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EAAE,EAC/C,EAAE,EACF,IAAI,EACJ,GAAG,OAAO,EACX,EAAE,KAAK,CAAC,iBAAiB,CAAC,KAAK,EAAE,UAAU,CAAC,GAAG,SAAS,GAAG,SAAS;IAO9D,aAAa,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EAAE,EAClD,EAAE,EACF,IAAI,EACJ,QAAQ,EACR,YAAY,EACZ,GAAG,OAAO,EACX,EAAE,KAAK,CAAC,oBAAoB,CAAC,KAAK,EAAE,UAAU,CAAC,GAAG,SAAS,GAAG,SAAS;IAQjE,WAAW,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EAC9C,OAAO,EAAE,KAAK,CAAC,kBAAkB,CAAC,KAAK,EAAE,UAAU,CAAC,EACpD,MAAM,EAAE,CAAC,IAAI,EAAE,KAAK,GAAG,IAAI,KAAK,KAAK,GAAG,IAAI,GAAG,IAAI,GAClD,KAAK,GAAG,IAAI;IAYR,cAAc,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EACjD,OAAO,EAAE,KAAK,CAAC,qBAAqB,CAAC,KAAK,EAAE,UAAU,CAAC,EACvD,MAAM,EAAE,CAAC,IAAI,EAAE,KAAK,GAAG,IAAI,KAAK,KAAK,GAAG,IAAI,GAAG,IAAI,GAClD,KAAK,GAAG,IAAI;CAWhB"}

@@ -0,0 +1,84 @@
import { __assign, __rest } from "tslib";
import { wrap } from 'optimism';
import { getFragmentQueryDocument, } from "../../utilities/index.js";
var ApolloCache = (function () {
function ApolloCache() {
this.getFragmentDoc = wrap(getFragmentQueryDocument);
}
ApolloCache.prototype.batch = function (options) {
var _this = this;
var optimisticId = typeof options.optimistic === "string" ? options.optimistic :
options.optimistic === false ? null : void 0;
var updateResult;
this.performTransaction(function () { return updateResult = options.update(_this); }, optimisticId);
return updateResult;
};
ApolloCache.prototype.recordOptimisticTransaction = function (transaction, optimisticId) {
this.performTransaction(transaction, optimisticId);
};
ApolloCache.prototype.transformDocument = function (document) {
return document;
};
ApolloCache.prototype.identify = function (object) {
return;
};
ApolloCache.prototype.gc = function () {
return [];
};
ApolloCache.prototype.modify = function (options) {
return false;
};
ApolloCache.prototype.transformForLink = function (document) {
return document;
};
ApolloCache.prototype.readQuery = function (options, optimistic) {
if (optimistic === void 0) { optimistic = !!options.optimistic; }
return this.read(__assign(__assign({}, options), { rootId: options.id || 'ROOT_QUERY', optimistic: optimistic }));
};
ApolloCache.prototype.readFragment = function (options, optimistic) {
if (optimistic === void 0) { optimistic = !!options.optimistic; }
return this.read(__assign(__assign({}, options), { query: this.getFragmentDoc(options.fragment, options.fragmentName), rootId: options.id, optimistic: optimistic }));
};
ApolloCache.prototype.writeQuery = function (_a) {
var id = _a.id, data = _a.data, options = __rest(_a, ["id", "data"]);
return this.write(Object.assign(options, {
dataId: id || 'ROOT_QUERY',
result: data,
}));
};
ApolloCache.prototype.writeFragment = function (_a) {
var id = _a.id, data = _a.data, fragment = _a.fragment, fragmentName = _a.fragmentName, options = __rest(_a, ["id", "data", "fragment", "fragmentName"]);
return this.write(Object.assign(options, {
query: this.getFragmentDoc(fragment, fragmentName),
dataId: id,
result: data,
}));
};
ApolloCache.prototype.updateQuery = function (options, update) {
return this.batch({
update: function (cache) {
var value = cache.readQuery(options);
var data = update(value);
if (data === void 0 || data === null)
return value;
cache.writeQuery(__assign(__assign({}, options), { data: data }));
return data;
},
});
};
ApolloCache.prototype.updateFragment = function (options, update) {
return this.batch({
update: function (cache) {
var value = cache.readFragment(options);
var data = update(value);
if (data === void 0 || data === null)
return value;
cache.writeFragment(__assign(__assign({}, options), { data: data }));
return data;
},
});
};
return ApolloCache;
}());
export { ApolloCache };
//# sourceMappingURL=cache.js.map
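The `updateQuery` helper above is a read-modify-write wrapped in `batch`; a sketch, assuming the `cache` and `GREETING` query from the previous example:

```js
// Reads the current result, lets the callback derive a new one, and writes it
// back; returning undefined or null leaves the cached data unchanged.
cache.updateQuery({ query: GREETING }, (data) =>
  data ? { greeting: data.greeting.toUpperCase() } : data
);
```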

File diff suppressed because one or more lines are too long

@@ -0,0 +1,55 @@
import { DataProxy } from './DataProxy';
import { Modifier, Modifiers } from './common';
import { ApolloCache } from '../cache';
export declare namespace Cache {
type WatchCallback<TData = any> = (diff: Cache.DiffResult<TData>, lastDiff?: Cache.DiffResult<TData>) => void;
interface ReadOptions<TVariables = any, TData = any> extends DataProxy.Query<TVariables, TData> {
rootId?: string;
previousResult?: any;
optimistic: boolean;
returnPartialData?: boolean;
canonizeResults?: boolean;
}
interface WriteOptions<TResult = any, TVariables = any> extends Omit<DataProxy.Query<TVariables, TResult>, "id">, Omit<DataProxy.WriteOptions<TResult>, "data"> {
dataId?: string;
result: TResult;
}
interface DiffOptions<TData = any, TVariables = any> extends ReadOptions<TVariables, TData> {
}
interface WatchOptions<TData = any, TVariables = any> extends ReadOptions<TVariables, TData> {
watcher?: object;
immediate?: boolean;
callback: WatchCallback<TData>;
lastDiff?: DiffResult<TData>;
}
interface EvictOptions {
id?: string;
fieldName?: string;
args?: Record<string, any>;
broadcast?: boolean;
}
interface ResetOptions {
discardWatches?: boolean;
}
interface ModifyOptions {
id?: string;
fields: Modifiers | Modifier<any>;
optimistic?: boolean;
broadcast?: boolean;
}
interface BatchOptions<TCache extends ApolloCache<any>, TUpdateResult = void> {
update(cache: TCache): TUpdateResult;
optimistic?: string | boolean;
removeOptimistic?: string;
onWatchUpdated?: (this: TCache, watch: Cache.WatchOptions, diff: Cache.DiffResult<any>, lastDiff: Cache.DiffResult<any> | undefined) => any;
}
export import DiffResult = DataProxy.DiffResult;
export import ReadQueryOptions = DataProxy.ReadQueryOptions;
export import ReadFragmentOptions = DataProxy.ReadFragmentOptions;
export import WriteQueryOptions = DataProxy.WriteQueryOptions;
export import WriteFragmentOptions = DataProxy.WriteFragmentOptions;
export import UpdateQueryOptions = DataProxy.UpdateQueryOptions;
export import UpdateFragmentOptions = DataProxy.UpdateFragmentOptions;
export import Fragment = DataProxy.Fragment;
}
//# sourceMappingURL=Cache.d.ts.map
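`EvictOptions` and `BatchOptions` in action, sketched against a fresh `InMemoryCache`; the `Book:1` entity ID is hypothetical:

```js
import { InMemoryCache } from '@apollo/client';

const cache = new InMemoryCache();

// Group several cache operations into a single broadcast; `optimistic: true`
// runs the update against the current top optimistic layer (see BatchOptions).
cache.batch({
  optimistic: true,
  update(c) {
    c.evict({ id: 'Book:1', fieldName: 'reviews' }); // EvictOptions
  },
});
cache.gc(); // collect entities left unreachable after the eviction
```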

@@ -0,0 +1 @@
{"version":3,"file":"Cache.d.ts","sourceRoot":"","sources":["../../../../src/cache/core/types/Cache.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AAC/C,OAAO,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAEvC,yBAAiB,KAAK,CAAC;IACrB,KAAY,aAAa,CAAC,KAAK,GAAG,GAAG,IAAI,CACvC,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,EAC7B,QAAQ,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,KAC/B,IAAI,CAAC;IAEV,UAAiB,WAAW,CAAC,UAAU,GAAG,GAAG,EAAE,KAAK,GAAG,GAAG,CACxD,SAAQ,SAAS,CAAC,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC;QAC1C,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,cAAc,CAAC,EAAE,GAAG,CAAC;QACrB,UAAU,EAAE,OAAO,CAAC;QACpB,iBAAiB,CAAC,EAAE,OAAO,CAAC;QAC5B,eAAe,CAAC,EAAE,OAAO,CAAC;KAC3B;IAED,UAAiB,YAAY,CAAC,OAAO,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,CAC3D,SAAQ,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,EAChD,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;QAErD,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,MAAM,EAAE,OAAO,CAAC;KACjB;IAED,UAAiB,WAAW,CAC1B,KAAK,GAAG,GAAG,EACX,UAAU,GAAG,GAAG,CAChB,SAAQ,WAAW,CAAC,UAAU,EAAE,KAAK,CAAC;KAIvC;IAED,UAAiB,YAAY,CAC3B,KAAK,GAAG,GAAG,EACX,UAAU,GAAG,GAAG,CAChB,SAAQ,WAAW,CAAC,UAAU,EAAE,KAAK,CAAC;QACtC,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,SAAS,CAAC,EAAE,OAAO,CAAC;QACpB,QAAQ,EAAE,aAAa,CAAC,KAAK,CAAC,CAAC;QAC/B,QAAQ,CAAC,EAAE,UAAU,CAAC,KAAK,CAAC,CAAC;KAC9B;IAED,UAAiB,YAAY;QAC3B,EAAE,CAAC,EAAE,MAAM,CAAC;QACZ,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC3B,SAAS,CAAC,EAAE,OAAO,CAAC;KACrB;IAID,UAAiB,YAAY;QAC3B,cAAc,CAAC,EAAE,OAAO,CAAC;KAC1B;IAED,UAAiB,aAAa;QAC5B,EAAE,CAAC,EAAE,MAAM,CAAC;QACZ,MAAM,EAAE,SAAS,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC;QAClC,UAAU,CAAC,EAAE,OAAO,CAAC;QACrB,SAAS,CAAC,EAAE,OAAO,CAAC;KACrB;IAED,UAAiB,YAAY,CAC3B,MAAM,SAAS,WAAW,CAAC,GAAG,CAAC,EAC/B,aAAa,GAAG,IAAI;QAIpB,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,aAAa,CAAC;QASrC,UAAU,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAQ9B,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAK1B,cAAc,CAAC,EAAE,CACf,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,KAAK,CAAC,YAAY,EACzB,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAC3B,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,SAAS,KACxC,GAAG,CAAC;KACV;IAED,MAAM,QAAQ,UAAU,GAAG,SAAS,CAAC,UAAU,CAAC;IAChD,MAAM,QAAQ,gBAAgB,GAAG,SAAS,CAAC,gBAAgB,CAAC;IAC5D,MAAM,QAAQ,mBAAmB,GAAG,SAAS,CAAC,mBAAmB,CAAC;IAClE,MAAM,QAAQ,iBAAiB,GAAG,SAAS,CAAC,iBAAiB,CAAC;IAC9D,MAAM,QAAQ,oBAAoB,GAAG,SAAS,CAAC,oBAAoB,CAAC;IACpE,MAAM,QAAQ,kBAAkB,GAAG,SAAS,CAAC,kBAAkB,CAAC;IAChE,MAAM,QAAQ,qBAAqB,GAAG,SAAS,CAAC,qBAAqB,CAAC;IACtE,MAAM,QAAQ,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC;CAC7C"}

@@ -0,0 +1,4 @@
export var Cache;
(function (Cache) {
})(Cache || (Cache = {}));
//# sourceMappingURL=Cache.js.map

@@ -0,0 +1 @@
{"version":3,"file":"Cache.js","sourceRoot":"","sources":["../../../../src/cache/core/types/Cache.ts"],"names":[],"mappings":"AAIA,MAAM,KAAW,KAAK,CA0GrB;AA1GD,WAAiB,KAAK;AA0GtB,CAAC,EA1GgB,KAAK,KAAL,KAAK,QA0GrB","sourcesContent":["import { DataProxy } from './DataProxy';\nimport { Modifier, Modifiers } from './common';\nimport { ApolloCache } from '../cache';\n\nexport namespace Cache {\n export type WatchCallback<TData = any> = (\n diff: Cache.DiffResult<TData>,\n lastDiff?: Cache.DiffResult<TData>,\n ) => void;\n\n export interface ReadOptions<TVariables = any, TData = any>\n extends DataProxy.Query<TVariables, TData> {\n rootId?: string;\n previousResult?: any;\n optimistic: boolean;\n returnPartialData?: boolean;\n canonizeResults?: boolean;\n }\n\n export interface WriteOptions<TResult = any, TVariables = any>\n extends Omit<DataProxy.Query<TVariables, TResult>, \"id\">,\n Omit<DataProxy.WriteOptions<TResult>, \"data\">\n {\n dataId?: string;\n result: TResult;\n }\n\n export interface DiffOptions<\n TData = any,\n TVariables = any,\n > extends ReadOptions<TVariables, TData> {\n // The DiffOptions interface is currently just an alias for\n // ReadOptions, though DiffOptions used to be responsible for\n // declaring the returnPartialData option.\n }\n\n export interface WatchOptions<\n TData = any,\n TVariables = any,\n > extends ReadOptions<TVariables, TData> {\n watcher?: object;\n immediate?: boolean;\n callback: WatchCallback<TData>;\n lastDiff?: DiffResult<TData>;\n }\n\n export interface EvictOptions {\n id?: string;\n fieldName?: string;\n args?: Record<string, any>;\n broadcast?: boolean;\n }\n\n // Although you can call cache.reset() without options, its behavior can be\n // configured by passing a Cache.ResetOptions object.\n export interface ResetOptions {\n discardWatches?: boolean;\n }\n\n export interface ModifyOptions {\n id?: string;\n fields: Modifiers | Modifier<any>;\n optimistic?: boolean;\n broadcast?: boolean;\n }\n\n export interface BatchOptions<\n TCache extends ApolloCache<any>,\n TUpdateResult = void,\n > {\n // Same as the first parameter of performTransaction, except the cache\n // argument will have the subclass type rather than ApolloCache.\n update(cache: TCache): TUpdateResult;\n\n // Passing a string for this option creates a new optimistic layer, with the\n // given string as its layer.id, just like passing a string for the\n // optimisticId parameter of performTransaction. Passing true is the same as\n // passing undefined to performTransaction (running the batch operation\n // against the current top layer of the cache), and passing false is the\n // same as passing null (running the operation against root/non-optimistic\n // cache data).\n optimistic?: string | boolean;\n\n // If you specify the ID of an optimistic layer using this option, that\n // layer will be removed as part of the batch transaction, triggering at\n // most one broadcast for both the transaction and the removal of the layer.\n // Note: this option is needed because calling cache.removeOptimistic during\n // the transaction function may not be not safe, since any modifications to\n // cache layers may be discarded after the transaction finishes.\n removeOptimistic?: string;\n\n // If you want to find out which watched queries were invalidated during\n // this batch operation, pass this optional callback function. 
Returning\n // false from the callback will prevent broadcasting this result.\n onWatchUpdated?: (\n this: TCache,\n watch: Cache.WatchOptions,\n diff: Cache.DiffResult<any>,\n lastDiff: Cache.DiffResult<any> | undefined,\n ) => any;\n }\n\n export import DiffResult = DataProxy.DiffResult;\n export import ReadQueryOptions = DataProxy.ReadQueryOptions;\n export import ReadFragmentOptions = DataProxy.ReadFragmentOptions;\n export import WriteQueryOptions = DataProxy.WriteQueryOptions;\n export import WriteFragmentOptions = DataProxy.WriteFragmentOptions;\n export import UpdateQueryOptions = DataProxy.UpdateQueryOptions;\n export import UpdateFragmentOptions = DataProxy.UpdateFragmentOptions;\n export import Fragment = DataProxy.Fragment;\n}\n"]}

@@ -0,0 +1,52 @@
import { DocumentNode } from 'graphql';
import { TypedDocumentNode } from '@graphql-typed-document-node/core';
import { MissingFieldError } from './common';
export declare namespace DataProxy {
interface Query<TVariables, TData> {
query: DocumentNode | TypedDocumentNode<TData, TVariables>;
variables?: TVariables;
id?: string;
}
interface Fragment<TVariables, TData> {
id?: string;
fragment: DocumentNode | TypedDocumentNode<TData, TVariables>;
fragmentName?: string;
variables?: TVariables;
}
interface ReadQueryOptions<TData, TVariables> extends Query<TVariables, TData> {
returnPartialData?: boolean;
optimistic?: boolean;
canonizeResults?: boolean;
}
interface ReadFragmentOptions<TData, TVariables> extends Fragment<TVariables, TData> {
returnPartialData?: boolean;
optimistic?: boolean;
canonizeResults?: boolean;
}
interface WriteOptions<TData> {
data: TData;
broadcast?: boolean;
overwrite?: boolean;
}
interface WriteQueryOptions<TData, TVariables> extends Query<TVariables, TData>, WriteOptions<TData> {
}
interface WriteFragmentOptions<TData, TVariables> extends Fragment<TVariables, TData>, WriteOptions<TData> {
}
interface UpdateQueryOptions<TData, TVariables> extends Omit<(ReadQueryOptions<TData, TVariables> & WriteQueryOptions<TData, TVariables>), 'data'> {
}
interface UpdateFragmentOptions<TData, TVariables> extends Omit<(ReadFragmentOptions<TData, TVariables> & WriteFragmentOptions<TData, TVariables>), 'data'> {
}
type DiffResult<T> = {
result?: T;
complete?: boolean;
missing?: MissingFieldError[];
fromOptimisticTransaction?: boolean;
};
}
export interface DataProxy {
readQuery<QueryType, TVariables = any>(options: DataProxy.ReadQueryOptions<QueryType, TVariables>, optimistic?: boolean): QueryType | null;
readFragment<FragmentType, TVariables = any>(options: DataProxy.ReadFragmentOptions<FragmentType, TVariables>, optimistic?: boolean): FragmentType | null;
writeQuery<TData = any, TVariables = any>(options: DataProxy.WriteQueryOptions<TData, TVariables>): void;
writeFragment<TData = any, TVariables = any>(options: DataProxy.WriteFragmentOptions<TData, TVariables>): void;
}
//# sourceMappingURL=DataProxy.d.ts.map
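The fragment half of the `DataProxy` interface, as a small sketch; the `Book` type and its fields are made up:

```js
import { InMemoryCache, gql } from '@apollo/client';

const cache = new InMemoryCache();
const BOOK_TITLE = gql`fragment BookTitle on Book { title }`;

// writeFragment targets a single normalized entity by its cache ID.
cache.writeFragment({
  id: 'Book:1',
  fragment: BOOK_TITLE,
  data: { __typename: 'Book', title: 'Dune' },
});
console.log(cache.readFragment({ id: 'Book:1', fragment: BOOK_TITLE }));
// → { __typename: 'Book', title: 'Dune' }
```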

@@ -0,0 +1 @@
{"version":3,"file":"DataProxy.d.ts","sourceRoot":"","sources":["../../../../src/cache/core/types/DataProxy.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AACvC,OAAO,EAAE,iBAAiB,EAAE,MAAM,mCAAmC,CAAC;AAEtE,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,yBAAiB,SAAS,CAAC;IACzB,UAAiB,KAAK,CAAC,UAAU,EAAE,KAAK;QAMtC,KAAK,EAAE,YAAY,GAAG,iBAAiB,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAK3D,SAAS,CAAC,EAAE,UAAU,CAAC;QAOvB,EAAE,CAAC,EAAE,MAAM,CAAC;KACb;IAED,UAAiB,QAAQ,CAAC,UAAU,EAAE,KAAK;QAMzC,EAAE,CAAC,EAAE,MAAM,CAAC;QAQZ,QAAQ,EAAE,YAAY,GAAG,iBAAiB,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAO9D,YAAY,CAAC,EAAE,MAAM,CAAC;QAKtB,SAAS,CAAC,EAAE,UAAU,CAAC;KACxB;IAED,UAAiB,gBAAgB,CAAC,KAAK,EAAE,UAAU,CACjD,SAAQ,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC;QAKhC,iBAAiB,CAAC,EAAE,OAAO,CAAC;QAM5B,UAAU,CAAC,EAAE,OAAO,CAAC;QAMrB,eAAe,CAAC,EAAE,OAAO,CAAC;KAC3B;IAED,UAAiB,mBAAmB,CAAC,KAAK,EAAE,UAAU,CACpD,SAAQ,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;QAKnC,iBAAiB,CAAC,EAAE,OAAO,CAAC;QAM5B,UAAU,CAAC,EAAE,OAAO,CAAC;QAMrB,eAAe,CAAC,EAAE,OAAO,CAAC;KAC3B;IAED,UAAiB,YAAY,CAAC,KAAK;QAIjC,IAAI,EAAE,KAAK,CAAC;QAIZ,SAAS,CAAC,EAAE,OAAO,CAAC;QAKpB,SAAS,CAAC,EAAE,OAAO,CAAC;KACrB;IAED,UAAiB,iBAAiB,CAAC,KAAK,EAAE,UAAU,CAClD,SAAQ,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,EAAE,YAAY,CAAC,KAAK,CAAC;KAAG;IAE1D,UAAiB,oBAAoB,CAAC,KAAK,EAAE,UAAU,CACrD,SAAQ,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC,EAAE,YAAY,CAAC,KAAK,CAAC;KAAG;IAE7D,UAAiB,kBAAkB,CAAC,KAAK,EAAE,UAAU,CACnD,SAAQ,IAAI,CAAC,CACX,gBAAgB,CAAC,KAAK,EAAE,UAAU,CAAC,GACnC,iBAAiB,CAAC,KAAK,EAAE,UAAU,CAAC,CACrC,EAAE,MAAM,CAAC;KAAG;IAEf,UAAiB,qBAAqB,CAAC,KAAK,EAAE,UAAU,CACtD,SAAQ,IAAI,CAAC,CACX,mBAAmB,CAAC,KAAK,EAAE,UAAU,CAAC,GACtC,oBAAoB,CAAC,KAAK,EAAE,UAAU,CAAC,CACxC,EAAE,MAAM,CAAC;KAAG;IAEf,KAAY,UAAU,CAAC,CAAC,IAAI;QAC1B,MAAM,CAAC,EAAE,CAAC,CAAC;QACX,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,OAAO,CAAC,EAAE,iBAAiB,EAAE,CAAC;QAC9B,yBAAyB,CAAC,EAAE,OAAO,CAAC;KACrC,CAAA;CACF;AAQD,MAAM,WAAW,SAAS;IAIxB,SAAS,CAAC,SAAS,EAAE,UAAU,GAAG,GAAG,EACnC,OAAO,EAAE,SAAS,CAAC,gBAAgB,CAAC,SAAS,EAAE,UAAU,CAAC,EAC1D,UAAU,CAAC,EAAE,OAAO,GACnB,SAAS,GAAG,IAAI,CAAC;IAOpB,YAAY,CAAC,YAAY,EAAE,UAAU,GAAG,GAAG,EACzC,OAAO,EAAE,SAAS,CAAC,mBAAmB,CAAC,YAAY,EAAE,UAAU,CAAC,EAChE,UAAU,CAAC,EAAE,OAAO,GACnB,YAAY,GAAG,IAAI,CAAC;IAKvB,UAAU,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EACtC,OAAO,EAAE,SAAS,CAAC,iBAAiB,CAAC,KAAK,EAAE,UAAU,CAAC,GACtD,IAAI,CAAC;IAOR,aAAa,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EACzC,OAAO,EAAE,SAAS,CAAC,oBAAoB,CAAC,KAAK,EAAE,UAAU,CAAC,GACzD,IAAI,CAAC;CACT"}

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=DataProxy.js.map

File diff suppressed because one or more lines are too long

@@ -0,0 +1,45 @@
import { DocumentNode, FieldNode } from 'graphql';
import { Reference, StoreObject, StoreValue, isReference } from '../../../utilities';
import { StorageType } from '../../inmemory/policies';
export declare type SafeReadonly<T> = T extends object ? Readonly<T> : T;
export declare type MissingTree = string | {
readonly [key: string]: MissingTree;
};
export declare class MissingFieldError {
readonly message: string;
readonly path: MissingTree | Array<string | number>;
readonly query: DocumentNode;
readonly variables?: Record<string, any> | undefined;
constructor(message: string, path: MissingTree | Array<string | number>, query: DocumentNode, variables?: Record<string, any> | undefined);
}
export interface FieldSpecifier {
typename?: string;
fieldName: string;
field?: FieldNode;
args?: Record<string, any>;
variables?: Record<string, any>;
}
export interface ReadFieldOptions extends FieldSpecifier {
from?: StoreObject | Reference;
}
export interface ReadFieldFunction {
<V = StoreValue>(options: ReadFieldOptions): SafeReadonly<V> | undefined;
<V = StoreValue>(fieldName: string, from?: StoreObject | Reference): SafeReadonly<V> | undefined;
}
export declare type ToReferenceFunction = (objOrIdOrRef: StoreObject | string | Reference, mergeIntoStore?: boolean) => Reference | undefined;
export declare type CanReadFunction = (value: StoreValue) => boolean;
export declare type Modifier<T> = (value: T, details: {
DELETE: any;
INVALIDATE: any;
fieldName: string;
storeFieldName: string;
readField: ReadFieldFunction;
canRead: CanReadFunction;
isReference: typeof isReference;
toReference: ToReferenceFunction;
storage: StorageType;
}) => T;
export declare type Modifiers = {
[fieldName: string]: Modifier<any>;
};
//# sourceMappingURL=common.d.ts.map
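A `Modifier` in use via `cache.modify`; `DELETE` is the sentinel described above. This continues the `cache` and `Book:1` entity from the fragment sketch earlier:

```js
// Returning DELETE removes the cached `title` field from Book:1; returning a
// plain value instead would overwrite it.
cache.modify({
  id: 'Book:1',
  fields: {
    title(existingTitle, { DELETE }) {
      return DELETE;
    },
  },
});
```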

@@ -0,0 +1 @@
{"version":3,"file":"common.d.ts","sourceRoot":"","sources":["../../../../src/cache/core/types/common.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAElD,OAAO,EACL,SAAS,EACT,WAAW,EACX,UAAU,EACV,WAAW,EACZ,MAAM,oBAAoB,CAAC;AAE5B,OAAO,EAAE,WAAW,EAAE,MAAM,yBAAyB,CAAC;AAStD,oBAAY,YAAY,CAAC,CAAC,IAAI,CAAC,SAAS,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAEjE,oBAAY,WAAW,GAAG,MAAM,GAAG;IACjC,QAAQ,EAAE,GAAG,EAAE,MAAM,GAAG,WAAW,CAAC;CACrC,CAAC;AAEF,qBAAa,iBAAiB;aAEV,OAAO,EAAE,MAAM;aACf,IAAI,EAAE,WAAW,GAAG,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;aAC1C,KAAK,EAAE,YAAY;aACnB,SAAS,CAAC;gBAHV,OAAO,EAAE,MAAM,EACf,IAAI,EAAE,WAAW,GAAG,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,EAC1C,KAAK,EAAE,YAAY,EACnB,SAAS,CAAC,iCAAqB;CAElD;AAED,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC3B,SAAS,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CACjC;AAED,MAAM,WAAW,gBAAiB,SAAQ,cAAc;IACtD,IAAI,CAAC,EAAE,WAAW,GAAG,SAAS,CAAC;CAChC;AAED,MAAM,WAAW,iBAAiB;IAChC,CAAC,CAAC,GAAG,UAAU,EAAE,OAAO,EAAE,gBAAgB,GAAG,YAAY,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACzE,CAAC,CAAC,GAAG,UAAU,EACb,SAAS,EAAE,MAAM,EACjB,IAAI,CAAC,EAAE,WAAW,GAAG,SAAS,GAC7B,YAAY,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;CAChC;AAED,oBAAY,mBAAmB,GAAG,CAChC,YAAY,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS,EAC9C,cAAc,CAAC,EAAE,OAAO,KACrB,SAAS,GAAG,SAAS,CAAC;AAE3B,oBAAY,eAAe,GAAG,CAAC,KAAK,EAAE,UAAU,KAAK,OAAO,CAAC;AAE7D,oBAAY,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE,OAAO,EAAE;IAC5C,MAAM,EAAE,GAAG,CAAC;IACZ,UAAU,EAAE,GAAG,CAAC;IAChB,SAAS,EAAE,MAAM,CAAC;IAClB,cAAc,EAAE,MAAM,CAAC;IACvB,SAAS,EAAE,iBAAiB,CAAC;IAC7B,OAAO,EAAE,eAAe,CAAC;IACzB,WAAW,EAAE,OAAO,WAAW,CAAC;IAChC,WAAW,EAAE,mBAAmB,CAAC;IACjC,OAAO,EAAE,WAAW,CAAC;CACtB,KAAK,CAAC,CAAC;AAER,oBAAY,SAAS,GAAG;IACtB,CAAC,SAAS,EAAE,MAAM,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC;CACpC,CAAC"}

@@ -0,0 +1,11 @@
var MissingFieldError = (function () {
function MissingFieldError(message, path, query, variables) {
this.message = message;
this.path = path;
this.query = query;
this.variables = variables;
}
return MissingFieldError;
}());
export { MissingFieldError };
//# sourceMappingURL=common.js.map

@@ -0,0 +1 @@
{"version":3,"file":"common.js","sourceRoot":"","sources":["../../../../src/cache/core/types/common.ts"],"names":[],"mappings":"AAwBA;IACE,2BACkB,OAAe,EACf,IAA0C,EAC1C,KAAmB,EACnB,SAA+B;QAH/B,YAAO,GAAP,OAAO,CAAQ;QACf,SAAI,GAAJ,IAAI,CAAsC;QAC1C,UAAK,GAAL,KAAK,CAAc;QACnB,cAAS,GAAT,SAAS,CAAsB;IAC9C,CAAC;IACN,wBAAC;AAAD,CAAC,AAPD,IAOC","sourcesContent":["import { DocumentNode, FieldNode } from 'graphql';\n\nimport {\n Reference,\n StoreObject,\n StoreValue,\n isReference,\n} from '../../../utilities';\n\nimport { StorageType } from '../../inmemory/policies';\n\n// The Readonly<T> type only really works for object types, since it marks\n// all of the object's properties as readonly, but there are many cases when\n// a generic type parameter like TExisting might be a string or some other\n// primitive type, in which case we need to avoid wrapping it with Readonly.\n// SafeReadonly<string> collapses to just string, which makes string\n// assignable to SafeReadonly<any>, whereas string is not assignable to\n// Readonly<any>, somewhat surprisingly.\nexport type SafeReadonly<T> = T extends object ? Readonly<T> : T;\n\nexport type MissingTree = string | {\n readonly [key: string]: MissingTree;\n};\n\nexport class MissingFieldError {\n constructor(\n public readonly message: string,\n public readonly path: MissingTree | Array<string | number>,\n public readonly query: DocumentNode,\n public readonly variables?: Record<string, any>,\n ) {}\n}\n\nexport interface FieldSpecifier {\n typename?: string;\n fieldName: string;\n field?: FieldNode;\n args?: Record<string, any>;\n variables?: Record<string, any>;\n}\n\nexport interface ReadFieldOptions extends FieldSpecifier {\n from?: StoreObject | Reference;\n}\n\nexport interface ReadFieldFunction {\n <V = StoreValue>(options: ReadFieldOptions): SafeReadonly<V> | undefined;\n <V = StoreValue>(\n fieldName: string,\n from?: StoreObject | Reference,\n ): SafeReadonly<V> | undefined;\n}\n\nexport type ToReferenceFunction = (\n objOrIdOrRef: StoreObject | string | Reference,\n mergeIntoStore?: boolean,\n) => Reference | undefined;\n\nexport type CanReadFunction = (value: StoreValue) => boolean;\n\nexport type Modifier<T> = (value: T, details: {\n DELETE: any;\n INVALIDATE: any;\n fieldName: string;\n storeFieldName: string;\n readField: ReadFieldFunction;\n canRead: CanReadFunction;\n isReference: typeof isReference;\n toReference: ToReferenceFunction;\n storage: StorageType;\n}) => T;\n\nexport type Modifiers = {\n [fieldName: string]: Modifier<any>;\n};\n"]}

View File

@@ -0,0 +1,14 @@
import '../utilities/globals';
export { Transaction, ApolloCache } from './core/cache';
export { Cache } from './core/types/Cache';
export { DataProxy } from './core/types/DataProxy';
export { MissingFieldError, ReadFieldOptions } from './core/types/common';
export { Reference, isReference, makeReference, } from '../utilities';
export { EntityStore } from './inmemory/entityStore';
export { fieldNameFromStoreName, defaultDataIdFromObject, } from './inmemory/helpers';
export { InMemoryCache, } from './inmemory/inMemoryCache';
export { ReactiveVar, makeVar, cacheSlot, } from './inmemory/reactiveVars';
export { TypePolicies, TypePolicy, FieldPolicy, FieldReadFunction, FieldMergeFunction, FieldFunctionOptions, PossibleTypesMap, Policies, } from './inmemory/policies';
export { canonicalStringify, } from './inmemory/object-canon';
export * from './inmemory/types';
//# sourceMappingURL=index.d.ts.map
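
To show how the re-exports above are typically consumed, here is a small hedged sketch that builds an InMemoryCache and a reactive variable from this entry point; the Book type policy and cartItems field are invented.

import { InMemoryCache, makeVar, defaultDataIdFromObject } from "@apollo/client/cache";

// Reactive variable: calling it with no argument reads the value,
// calling it with an argument updates it and notifies dependent watchers.
const cartItems = makeVar<string[]>([]);
cartItems([...cartItems(), "Book:1"]);

const cache = new InMemoryCache({
  dataIdFromObject: defaultDataIdFromObject,     // explicit here, but this is already the default
  typePolicies: {
    Book: { keyFields: ["isbn"] },               // hypothetical type policy
    Query: { fields: { cartItems: { read: () => cartItems() } } },
  },
});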

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/cache/index.ts"],"names":[],"mappings":"AAAA,OAAO,sBAAsB,CAAC;AAE9B,OAAO,EAAE,WAAW,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AACxD,OAAO,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AACnD,OAAO,EACL,iBAAiB,EACjB,gBAAgB,EACjB,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EACL,SAAS,EACT,WAAW,EACX,aAAa,GACd,MAAM,cAAc,CAAC;AAEtB,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EACL,sBAAsB,EACtB,uBAAuB,GACxB,MAAM,oBAAoB,CAAA;AAE3B,OAAO,EACL,aAAa,GACd,MAAM,0BAA0B,CAAC;AAElC,OAAO,EACL,WAAW,EACX,OAAO,EACP,SAAS,GACV,MAAM,yBAAyB,CAAC;AAEjC,OAAO,EACL,YAAY,EACZ,UAAU,EACV,WAAW,EACX,iBAAiB,EACjB,kBAAkB,EAClB,oBAAoB,EACpB,gBAAgB,EAChB,QAAQ,GACT,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EACL,kBAAkB,GACnB,MAAM,yBAAyB,CAAC;AAEjC,cAAc,kBAAkB,CAAC"}

View File

@@ -0,0 +1,13 @@
import "../utilities/globals/index.js";
export { ApolloCache } from "./core/cache.js";
export { Cache } from "./core/types/Cache.js";
export { MissingFieldError } from "./core/types/common.js";
export { isReference, makeReference, } from "../utilities/index.js";
export { EntityStore } from "./inmemory/entityStore.js";
export { fieldNameFromStoreName, defaultDataIdFromObject, } from "./inmemory/helpers.js";
export { InMemoryCache, } from "./inmemory/inMemoryCache.js";
export { makeVar, cacheSlot, } from "./inmemory/reactiveVars.js";
export { Policies, } from "./inmemory/policies.js";
export { canonicalStringify, } from "./inmemory/object-canon.js";
export * from "./inmemory/types.js";
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/cache/index.ts"],"names":[],"mappings":"AAAA,OAAO,sBAAsB,CAAC;AAE9B,OAAO,EAAe,WAAW,EAAE,MAAM,cAAc,CAAC;AACxD,OAAO,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAE3C,OAAO,EACL,iBAAiB,EAElB,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EAEL,WAAW,EACX,aAAa,GACd,MAAM,cAAc,CAAC;AAEtB,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EACL,sBAAsB,EACtB,uBAAuB,GACxB,MAAM,oBAAoB,CAAA;AAE3B,OAAO,EACL,aAAa,GACd,MAAM,0BAA0B,CAAC;AAElC,OAAO,EAEL,OAAO,EACP,SAAS,GACV,MAAM,yBAAyB,CAAC;AAEjC,OAAO,EAQL,QAAQ,GACT,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EACL,kBAAkB,GACnB,MAAM,yBAAyB,CAAC;AAEjC,cAAc,kBAAkB,CAAC","sourcesContent":["import '../utilities/globals';\n\nexport { Transaction, ApolloCache } from './core/cache';\nexport { Cache } from './core/types/Cache';\nexport { DataProxy } from './core/types/DataProxy';\nexport {\n MissingFieldError,\n ReadFieldOptions\n} from './core/types/common';\n\nexport {\n Reference,\n isReference,\n makeReference,\n} from '../utilities';\n\nexport { EntityStore } from './inmemory/entityStore';\nexport {\n fieldNameFromStoreName,\n defaultDataIdFromObject,\n} from './inmemory/helpers'\n\nexport {\n InMemoryCache,\n} from './inmemory/inMemoryCache';\n\nexport {\n ReactiveVar,\n makeVar,\n cacheSlot,\n} from './inmemory/reactiveVars';\n\nexport {\n TypePolicies,\n TypePolicy,\n FieldPolicy,\n FieldReadFunction,\n FieldMergeFunction,\n FieldFunctionOptions,\n PossibleTypesMap,\n Policies,\n} from './inmemory/policies';\n\nexport {\n canonicalStringify,\n} from './inmemory/object-canon';\n\nexport * from './inmemory/types';\n"]}

View File

@@ -0,0 +1,2 @@
declare const optimism: any;
//# sourceMappingURL=optimism.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"optimism.d.ts","sourceRoot":"","sources":["../../../../src/cache/inmemory/__mocks__/optimism.ts"],"names":[],"mappings":"AAAA,QAAA,MAAM,QAAQ,KAAiC,CAAC"}

View File

@@ -0,0 +1,14 @@
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var optimism = jest.requireActual('optimism');
module.exports = __assign(__assign({}, optimism), { wrap: jest.fn(optimism.wrap) });
//# sourceMappingURL=optimism.js.map
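
The manual mock above re-exports the real optimism module but wraps wrap in jest.fn, so tests can observe how many memoized functions are created without changing behaviour. A hedged sketch of a test that relies on it, assuming a jest configuration whose roots make this __mocks__ directory visible:

jest.mock("optimism"); // resolves to the manual mock shown above

import { wrap } from "optimism";
import { InMemoryCache } from "@apollo/client/cache";

test("InMemoryCache builds its memoized readers via optimism.wrap", () => {
  new InMemoryCache();
  // wrap is a jest.fn around the real implementation, so call tracking
  // works while the caching behaviour stays unchanged.
  expect(wrap).toHaveBeenCalled();
});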

View File

@@ -0,0 +1 @@
{"version":3,"file":"optimism.js","sourceRoot":"","sources":["../../../../src/cache/inmemory/__mocks__/optimism.ts"],"names":[],"mappings":";;;;;;;;;;;AAAA,IAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,UAAU,CAAC,CAAC;AAChD,MAAM,CAAC,OAAO,yBACT,QAAQ,KACX,IAAI,EAAE,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,GAC7B,CAAC","sourcesContent":["const optimism = jest.requireActual('optimism');\nmodule.exports = {\n ...optimism,\n wrap: jest.fn(optimism.wrap),\n};\n"]}

View File

@@ -0,0 +1,83 @@
import { Trie } from '@wry/trie';
import { StoreValue, StoreObject, Reference } from '../../utilities';
import { NormalizedCache, NormalizedCacheObject } from './types';
import { Policies, StorageType } from './policies';
import { Cache } from '../core/types/Cache';
import { SafeReadonly, Modifier, Modifiers, ToReferenceFunction, CanReadFunction } from '../core/types/common';
export declare abstract class EntityStore implements NormalizedCache {
readonly policies: Policies;
readonly group: CacheGroup;
protected data: NormalizedCacheObject;
constructor(policies: Policies, group: CacheGroup);
abstract addLayer(layerId: string, replay: (layer: EntityStore) => any): Layer;
abstract removeLayer(layerId: string): EntityStore;
toObject(): NormalizedCacheObject;
has(dataId: string): boolean;
get(dataId: string, fieldName: string): StoreValue;
protected lookup(dataId: string, dependOnExistence?: boolean): StoreObject | undefined;
merge(older: string | StoreObject, newer: StoreObject | string): void;
modify(dataId: string, fields: Modifier<any> | Modifiers): boolean;
delete(dataId: string, fieldName?: string, args?: Record<string, any>): boolean;
evict(options: Cache.EvictOptions, limit: EntityStore): boolean;
clear(): void;
extract(): NormalizedCacheObject;
replace(newData: NormalizedCacheObject | null): void;
abstract getStorage(idOrObj: string | StoreObject, ...storeFieldNames: (string | number)[]): StorageType;
private rootIds;
retain(rootId: string): number;
release(rootId: string): number;
getRootIdSet(ids?: Set<string>): Set<string>;
gc(): string[];
private refs;
findChildRefIds(dataId: string): Record<string, true>;
makeCacheKey(...args: any[]): object;
getFieldValue: <T = StoreValue>(objectOrReference: StoreObject | Reference | undefined, storeFieldName: string) => SafeReadonly<T>;
canRead: CanReadFunction;
toReference: ToReferenceFunction;
}
export declare type FieldValueGetter = EntityStore["getFieldValue"];
declare class CacheGroup {
readonly caching: boolean;
private parent;
private d;
keyMaker: Trie<object>;
constructor(caching: boolean, parent?: CacheGroup | null);
resetCaching(): void;
depend(dataId: string, storeFieldName: string): void;
dirty(dataId: string, storeFieldName: string): void;
}
export declare function maybeDependOnExistenceOfEntity(store: NormalizedCache, entityId: string): void;
export declare namespace EntityStore {
class Root extends EntityStore {
constructor({ policies, resultCaching, seed, }: {
policies: Policies;
resultCaching?: boolean;
seed?: NormalizedCacheObject;
});
readonly stump: Stump;
addLayer(layerId: string, replay: (layer: EntityStore) => any): Layer;
removeLayer(): Root;
readonly storageTrie: Trie<StorageType>;
getStorage(): StorageType;
}
}
declare class Layer extends EntityStore {
readonly id: string;
readonly parent: EntityStore;
readonly replay: (layer: EntityStore) => any;
readonly group: CacheGroup;
constructor(id: string, parent: EntityStore, replay: (layer: EntityStore) => any, group: CacheGroup);
addLayer(layerId: string, replay: (layer: EntityStore) => any): Layer;
removeLayer(layerId: string): EntityStore;
toObject(): NormalizedCacheObject;
findChildRefIds(dataId: string): Record<string, true>;
getStorage(): StorageType;
}
declare class Stump extends Layer {
constructor(root: EntityStore.Root);
removeLayer(): this;
merge(): any;
}
export declare function supportsResultCaching(store: any): store is EntityStore;
export {};
//# sourceMappingURL=entityStore.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"entityStore.d.ts","sourceRoot":"","sources":["../../../src/cache/inmemory/entityStore.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAEjC,OAAO,EAEL,UAAU,EACV,WAAW,EACX,SAAS,EAMV,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,eAAe,EAAE,qBAAqB,EAAE,MAAM,SAAS,CAAC;AAEjE,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AACnD,OAAO,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAC5C,OAAO,EACL,YAAY,EACZ,QAAQ,EACR,SAAS,EAET,mBAAmB,EACnB,eAAe,EAChB,MAAM,sBAAsB,CAAC;AAM9B,8BAAsB,WAAY,YAAW,eAAe;aAIxC,QAAQ,EAAE,QAAQ;aAClB,KAAK,EAAE,UAAU;IAJnC,SAAS,CAAC,IAAI,EAAE,qBAAqB,CAAuB;gBAG1C,QAAQ,EAAE,QAAQ,EAClB,KAAK,EAAE,UAAU;aAGnB,QAAQ,CACtB,OAAO,EAAE,MAAM,EACf,MAAM,EAAE,CAAC,KAAK,EAAE,WAAW,KAAK,GAAG,GAClC,KAAK;aAEQ,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW;IAMlD,QAAQ,IAAI,qBAAqB;IAIjC,GAAG,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO;IAI5B,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,UAAU;IAiBzD,SAAS,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,WAAW,GAAG,SAAS;IAqB/E,KAAK,CACV,KAAK,EAAE,MAAM,GAAG,WAAW,EAC3B,KAAK,EAAE,WAAW,GAAG,MAAM,GAC1B,IAAI;IAyFA,MAAM,CACX,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,QAAQ,CAAC,GAAG,CAAC,GAAG,SAAS,GAChC,OAAO;IAkFH,MAAM,CACX,MAAM,EAAE,MAAM,EACd,SAAS,CAAC,EAAE,MAAM,EAClB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC;IAerB,KAAK,CACV,OAAO,EAAE,KAAK,CAAC,YAAY,EAC3B,KAAK,EAAE,WAAW,GACjB,OAAO;IAoBH,KAAK,IAAI,IAAI;IAIb,OAAO,IAAI,qBAAqB;IAchC,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,IAAI,GAAG,IAAI;aAiB3C,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,WAAW,EAC7B,GAAG,eAAe,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,GACtC,WAAW;IAKd,OAAO,CAAC,OAAO,CAES;IAEjB,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM;IAI9B,OAAO,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM;IAW/B,YAAY,CAAC,GAAG,cAAoB;IAiBpC,EAAE;IAwBT,OAAO,CAAC,IAAI,CAEY;IAEjB,eAAe,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC;IAoCrD,YAAY,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,MAAM;IAOpC,aAAa,sCACC,WAAW,GAAG,SAAS,GAAG,SAAS,kBACtC,MAAM,qBAKH;IAKd,OAAO,EAAE,eAAe,CAI7B;IAMK,WAAW,EAAE,mBAAmB,CAqBrC;CACH;AAED,oBAAY,gBAAgB,GAAG,WAAW,CAAC,eAAe,CAAC,CAAC;AAe5D,cAAM,UAAU;aAQI,OAAO,EAAE,OAAO;IAChC,OAAO,CAAC,MAAM;IARhB,OAAO,CAAC,CAAC,CAAqD;IAIvD,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBAGZ,OAAO,EAAE,OAAO,EACxB,MAAM,GAAE,UAAU,GAAG,IAAW;IAKnC,YAAY;IAKZ,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,cAAc,EAAE,MAAM;IAkB7C,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,cAAc,EAAE,MAAM;CAepD;AASD,wBAAgB,8BAA8B,CAC5C,KAAK,EAAE,eAAe,EACtB,QAAQ,EAAE,MAAM,QAajB;AAED,yBAAiB,WAAW,CAAC;IAE3B,MAAa,IAAK,SAAQ,WAAW;oBACvB,EACV,QAAQ,EACR,aAAoB,EACpB,IAAI,GACL,EAAE;YACD,QAAQ,EAAE,QAAQ,CAAC;YACnB,aAAa,CAAC,EAAE,OAAO,CAAC;YACxB,IAAI,CAAC,EAAE,qBAAqB,CAAC;SAC9B;QAKD,SAAgB,KAAK,QAAmB;QAEjC,QAAQ,CACb,OAAO,EAAE,MAAM,EACf,MAAM,EAAE,CAAC,KAAK,EAAE,WAAW,KAAK,GAAG,GAClC,KAAK;QAOD,WAAW,IAAI,IAAI;QAK1B,SAAgB,WAAW,oBAAwC;QAC5D,UAAU,IAAI,WAAW;KAGjC;CACF;AAID,cAAM,KAAM,SAAQ,WAAW;aAEX,EAAE,EAAE,MAAM;aACV,MAAM,EAAE,WAAW;aACnB,MAAM,EAAE,CAAC,KAAK,EAAE,WAAW,KAAK,GAAG;aACnC,KAAK,EAAE,UAAU;gBAHjB,EAAE,EAAE,MAAM,EACV,MAAM,EAAE,WAAW,EACnB,MAAM,EAAE,CAAC,KAAK,EAAE,WAAW,KAAK,GAAG,EACnC,KAAK,EAAE,UAAU;IAM5B,QAAQ,CACb,OAAO,EAAE,MAAM,EACf,MAAM,EAAE,CAAC,KAAK,EAAE,WAAW,KAAK,GAAG,GAClC,KAAK;IAID,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW;IAqDzC,QAAQ,IAAI,qBAAqB;IAOjC,eAAe,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC;IAQrD,UAAU,IAAI,WAAW;CAKjC;AAMD,cAAM,KAAM,SAAQ,KAAK;gBACX,IAAI,EAAE,WAAW,CAAC,IAAI;IAS3B,WAAW;IAKX,KAAK;CAQb;AAiBD,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,GAAG,GAAG,KAAK,IAAI,WAAW,CAGtE"}

View File

@@ -0,0 +1,467 @@
import { __assign, __extends, __rest } from "tslib";
import { invariant } from "../../utilities/globals/index.js";
import { dep } from 'optimism';
import { equal } from '@wry/equality';
import { Trie } from '@wry/trie';
import { isReference, makeReference, DeepMerger, maybeDeepFreeze, canUseWeakMap, isNonNullObject, } from "../../utilities/index.js";
import { hasOwn, fieldNameFromStoreName } from "./helpers.js";
var DELETE = Object.create(null);
var delModifier = function () { return DELETE; };
var INVALIDATE = Object.create(null);
var EntityStore = (function () {
function EntityStore(policies, group) {
var _this = this;
this.policies = policies;
this.group = group;
this.data = Object.create(null);
this.rootIds = Object.create(null);
this.refs = Object.create(null);
this.getFieldValue = function (objectOrReference, storeFieldName) { return maybeDeepFreeze(isReference(objectOrReference)
? _this.get(objectOrReference.__ref, storeFieldName)
: objectOrReference && objectOrReference[storeFieldName]); };
this.canRead = function (objOrRef) {
return isReference(objOrRef)
? _this.has(objOrRef.__ref)
: typeof objOrRef === "object";
};
this.toReference = function (objOrIdOrRef, mergeIntoStore) {
if (typeof objOrIdOrRef === "string") {
return makeReference(objOrIdOrRef);
}
if (isReference(objOrIdOrRef)) {
return objOrIdOrRef;
}
var id = _this.policies.identify(objOrIdOrRef)[0];
if (id) {
var ref = makeReference(id);
if (mergeIntoStore) {
_this.merge(id, objOrIdOrRef);
}
return ref;
}
};
}
EntityStore.prototype.toObject = function () {
return __assign({}, this.data);
};
EntityStore.prototype.has = function (dataId) {
return this.lookup(dataId, true) !== void 0;
};
EntityStore.prototype.get = function (dataId, fieldName) {
this.group.depend(dataId, fieldName);
if (hasOwn.call(this.data, dataId)) {
var storeObject = this.data[dataId];
if (storeObject && hasOwn.call(storeObject, fieldName)) {
return storeObject[fieldName];
}
}
if (fieldName === "__typename" &&
hasOwn.call(this.policies.rootTypenamesById, dataId)) {
return this.policies.rootTypenamesById[dataId];
}
if (this instanceof Layer) {
return this.parent.get(dataId, fieldName);
}
};
EntityStore.prototype.lookup = function (dataId, dependOnExistence) {
if (dependOnExistence)
this.group.depend(dataId, "__exists");
if (hasOwn.call(this.data, dataId)) {
return this.data[dataId];
}
if (this instanceof Layer) {
return this.parent.lookup(dataId, dependOnExistence);
}
if (this.policies.rootTypenamesById[dataId]) {
return Object.create(null);
}
};
EntityStore.prototype.merge = function (older, newer) {
var _this = this;
var dataId;
if (isReference(older))
older = older.__ref;
if (isReference(newer))
newer = newer.__ref;
var existing = typeof older === "string"
? this.lookup(dataId = older)
: older;
var incoming = typeof newer === "string"
? this.lookup(dataId = newer)
: newer;
if (!incoming)
return;
__DEV__ ? invariant(typeof dataId === "string", "store.merge expects a string ID") : invariant(typeof dataId === "string", 1);
var merged = new DeepMerger(storeObjectReconciler).merge(existing, incoming);
this.data[dataId] = merged;
if (merged !== existing) {
delete this.refs[dataId];
if (this.group.caching) {
var fieldsToDirty_1 = Object.create(null);
if (!existing)
fieldsToDirty_1.__exists = 1;
Object.keys(incoming).forEach(function (storeFieldName) {
if (!existing || existing[storeFieldName] !== merged[storeFieldName]) {
fieldsToDirty_1[storeFieldName] = 1;
var fieldName = fieldNameFromStoreName(storeFieldName);
if (fieldName !== storeFieldName &&
!_this.policies.hasKeyArgs(merged.__typename, fieldName)) {
fieldsToDirty_1[fieldName] = 1;
}
if (merged[storeFieldName] === void 0 && !(_this instanceof Layer)) {
delete merged[storeFieldName];
}
}
});
if (fieldsToDirty_1.__typename &&
!(existing && existing.__typename) &&
this.policies.rootTypenamesById[dataId] === merged.__typename) {
delete fieldsToDirty_1.__typename;
}
Object.keys(fieldsToDirty_1).forEach(function (fieldName) { return _this.group.dirty(dataId, fieldName); });
}
}
};
EntityStore.prototype.modify = function (dataId, fields) {
var _this = this;
var storeObject = this.lookup(dataId);
if (storeObject) {
var changedFields_1 = Object.create(null);
var needToMerge_1 = false;
var allDeleted_1 = true;
var sharedDetails_1 = {
DELETE: DELETE,
INVALIDATE: INVALIDATE,
isReference: isReference,
toReference: this.toReference,
canRead: this.canRead,
readField: function (fieldNameOrOptions, from) { return _this.policies.readField(typeof fieldNameOrOptions === "string" ? {
fieldName: fieldNameOrOptions,
from: from || makeReference(dataId),
} : fieldNameOrOptions, { store: _this }); },
};
Object.keys(storeObject).forEach(function (storeFieldName) {
var fieldName = fieldNameFromStoreName(storeFieldName);
var fieldValue = storeObject[storeFieldName];
if (fieldValue === void 0)
return;
var modify = typeof fields === "function"
? fields
: fields[storeFieldName] || fields[fieldName];
if (modify) {
var newValue = modify === delModifier ? DELETE :
modify(maybeDeepFreeze(fieldValue), __assign(__assign({}, sharedDetails_1), { fieldName: fieldName, storeFieldName: storeFieldName, storage: _this.getStorage(dataId, storeFieldName) }));
if (newValue === INVALIDATE) {
_this.group.dirty(dataId, storeFieldName);
}
else {
if (newValue === DELETE)
newValue = void 0;
if (newValue !== fieldValue) {
changedFields_1[storeFieldName] = newValue;
needToMerge_1 = true;
fieldValue = newValue;
}
}
}
if (fieldValue !== void 0) {
allDeleted_1 = false;
}
});
if (needToMerge_1) {
this.merge(dataId, changedFields_1);
if (allDeleted_1) {
if (this instanceof Layer) {
this.data[dataId] = void 0;
}
else {
delete this.data[dataId];
}
this.group.dirty(dataId, "__exists");
}
return true;
}
}
return false;
};
EntityStore.prototype.delete = function (dataId, fieldName, args) {
var _a;
var storeObject = this.lookup(dataId);
if (storeObject) {
var typename = this.getFieldValue(storeObject, "__typename");
var storeFieldName = fieldName && args
? this.policies.getStoreFieldName({ typename: typename, fieldName: fieldName, args: args })
: fieldName;
return this.modify(dataId, storeFieldName ? (_a = {},
_a[storeFieldName] = delModifier,
_a) : delModifier);
}
return false;
};
EntityStore.prototype.evict = function (options, limit) {
var evicted = false;
if (options.id) {
if (hasOwn.call(this.data, options.id)) {
evicted = this.delete(options.id, options.fieldName, options.args);
}
if (this instanceof Layer && this !== limit) {
evicted = this.parent.evict(options, limit) || evicted;
}
if (options.fieldName || evicted) {
this.group.dirty(options.id, options.fieldName || "__exists");
}
}
return evicted;
};
EntityStore.prototype.clear = function () {
this.replace(null);
};
EntityStore.prototype.extract = function () {
var _this = this;
var obj = this.toObject();
var extraRootIds = [];
this.getRootIdSet().forEach(function (id) {
if (!hasOwn.call(_this.policies.rootTypenamesById, id)) {
extraRootIds.push(id);
}
});
if (extraRootIds.length) {
obj.__META = { extraRootIds: extraRootIds.sort() };
}
return obj;
};
EntityStore.prototype.replace = function (newData) {
var _this = this;
Object.keys(this.data).forEach(function (dataId) {
if (!(newData && hasOwn.call(newData, dataId))) {
_this.delete(dataId);
}
});
if (newData) {
var __META = newData.__META, rest_1 = __rest(newData, ["__META"]);
Object.keys(rest_1).forEach(function (dataId) {
_this.merge(dataId, rest_1[dataId]);
});
if (__META) {
__META.extraRootIds.forEach(this.retain, this);
}
}
};
EntityStore.prototype.retain = function (rootId) {
return this.rootIds[rootId] = (this.rootIds[rootId] || 0) + 1;
};
EntityStore.prototype.release = function (rootId) {
if (this.rootIds[rootId] > 0) {
var count = --this.rootIds[rootId];
if (!count)
delete this.rootIds[rootId];
return count;
}
return 0;
};
EntityStore.prototype.getRootIdSet = function (ids) {
if (ids === void 0) { ids = new Set(); }
Object.keys(this.rootIds).forEach(ids.add, ids);
if (this instanceof Layer) {
this.parent.getRootIdSet(ids);
}
else {
Object.keys(this.policies.rootTypenamesById).forEach(ids.add, ids);
}
return ids;
};
EntityStore.prototype.gc = function () {
var _this = this;
var ids = this.getRootIdSet();
var snapshot = this.toObject();
ids.forEach(function (id) {
if (hasOwn.call(snapshot, id)) {
Object.keys(_this.findChildRefIds(id)).forEach(ids.add, ids);
delete snapshot[id];
}
});
var idsToRemove = Object.keys(snapshot);
if (idsToRemove.length) {
var root_1 = this;
while (root_1 instanceof Layer)
root_1 = root_1.parent;
idsToRemove.forEach(function (id) { return root_1.delete(id); });
}
return idsToRemove;
};
EntityStore.prototype.findChildRefIds = function (dataId) {
if (!hasOwn.call(this.refs, dataId)) {
var found_1 = this.refs[dataId] = Object.create(null);
var root = this.data[dataId];
if (!root)
return found_1;
var workSet_1 = new Set([root]);
workSet_1.forEach(function (obj) {
if (isReference(obj)) {
found_1[obj.__ref] = true;
}
if (isNonNullObject(obj)) {
Object.keys(obj).forEach(function (key) {
var child = obj[key];
if (isNonNullObject(child)) {
workSet_1.add(child);
}
});
}
});
}
return this.refs[dataId];
};
EntityStore.prototype.makeCacheKey = function () {
return this.group.keyMaker.lookupArray(arguments);
};
return EntityStore;
}());
export { EntityStore };
var CacheGroup = (function () {
function CacheGroup(caching, parent) {
if (parent === void 0) { parent = null; }
this.caching = caching;
this.parent = parent;
this.d = null;
this.resetCaching();
}
CacheGroup.prototype.resetCaching = function () {
this.d = this.caching ? dep() : null;
this.keyMaker = new Trie(canUseWeakMap);
};
CacheGroup.prototype.depend = function (dataId, storeFieldName) {
if (this.d) {
this.d(makeDepKey(dataId, storeFieldName));
var fieldName = fieldNameFromStoreName(storeFieldName);
if (fieldName !== storeFieldName) {
this.d(makeDepKey(dataId, fieldName));
}
if (this.parent) {
this.parent.depend(dataId, storeFieldName);
}
}
};
CacheGroup.prototype.dirty = function (dataId, storeFieldName) {
if (this.d) {
this.d.dirty(makeDepKey(dataId, storeFieldName), storeFieldName === "__exists" ? "forget" : "setDirty");
}
};
return CacheGroup;
}());
function makeDepKey(dataId, storeFieldName) {
return storeFieldName + '#' + dataId;
}
export function maybeDependOnExistenceOfEntity(store, entityId) {
if (supportsResultCaching(store)) {
store.group.depend(entityId, "__exists");
}
}
(function (EntityStore) {
var Root = (function (_super) {
__extends(Root, _super);
function Root(_a) {
var policies = _a.policies, _b = _a.resultCaching, resultCaching = _b === void 0 ? true : _b, seed = _a.seed;
var _this = _super.call(this, policies, new CacheGroup(resultCaching)) || this;
_this.stump = new Stump(_this);
_this.storageTrie = new Trie(canUseWeakMap);
if (seed)
_this.replace(seed);
return _this;
}
Root.prototype.addLayer = function (layerId, replay) {
return this.stump.addLayer(layerId, replay);
};
Root.prototype.removeLayer = function () {
return this;
};
Root.prototype.getStorage = function () {
return this.storageTrie.lookupArray(arguments);
};
return Root;
}(EntityStore));
EntityStore.Root = Root;
})(EntityStore || (EntityStore = {}));
var Layer = (function (_super) {
__extends(Layer, _super);
function Layer(id, parent, replay, group) {
var _this = _super.call(this, parent.policies, group) || this;
_this.id = id;
_this.parent = parent;
_this.replay = replay;
_this.group = group;
replay(_this);
return _this;
}
Layer.prototype.addLayer = function (layerId, replay) {
return new Layer(layerId, this, replay, this.group);
};
Layer.prototype.removeLayer = function (layerId) {
var _this = this;
var parent = this.parent.removeLayer(layerId);
if (layerId === this.id) {
if (this.group.caching) {
Object.keys(this.data).forEach(function (dataId) {
var ownStoreObject = _this.data[dataId];
var parentStoreObject = parent["lookup"](dataId);
if (!parentStoreObject) {
_this.delete(dataId);
}
else if (!ownStoreObject) {
_this.group.dirty(dataId, "__exists");
Object.keys(parentStoreObject).forEach(function (storeFieldName) {
_this.group.dirty(dataId, storeFieldName);
});
}
else if (ownStoreObject !== parentStoreObject) {
Object.keys(ownStoreObject).forEach(function (storeFieldName) {
if (!equal(ownStoreObject[storeFieldName], parentStoreObject[storeFieldName])) {
_this.group.dirty(dataId, storeFieldName);
}
});
}
});
}
return parent;
}
if (parent === this.parent)
return this;
return parent.addLayer(this.id, this.replay);
};
Layer.prototype.toObject = function () {
return __assign(__assign({}, this.parent.toObject()), this.data);
};
Layer.prototype.findChildRefIds = function (dataId) {
var fromParent = this.parent.findChildRefIds(dataId);
return hasOwn.call(this.data, dataId) ? __assign(__assign({}, fromParent), _super.prototype.findChildRefIds.call(this, dataId)) : fromParent;
};
Layer.prototype.getStorage = function () {
var p = this.parent;
while (p.parent)
p = p.parent;
return p.getStorage.apply(p, arguments);
};
return Layer;
}(EntityStore));
var Stump = (function (_super) {
__extends(Stump, _super);
function Stump(root) {
return _super.call(this, "EntityStore.Stump", root, function () { }, new CacheGroup(root.group.caching, root.group)) || this;
}
Stump.prototype.removeLayer = function () {
return this;
};
Stump.prototype.merge = function () {
return this.parent.merge.apply(this.parent, arguments);
};
return Stump;
}(Layer));
function storeObjectReconciler(existingObject, incomingObject, property) {
var existingValue = existingObject[property];
var incomingValue = incomingObject[property];
return equal(existingValue, incomingValue) ? existingValue : incomingValue;
}
export function supportsResultCaching(store) {
return !!(store instanceof EntityStore && store.group.caching);
}
//# sourceMappingURL=entityStore.js.map
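
The retain/release/gc methods above implement reference-counted garbage collection of normalized entities. A hedged sketch of exercising them through the public InMemoryCache API (the Book entry is made-up seed data, and the commented results are what the code above should produce for it):

import { InMemoryCache } from "@apollo/client/cache";

const cache = new InMemoryCache();
// Seed the store with a normalized object that nothing references yet.
cache.restore({ "Book:1": { __typename: "Book", title: "1984" } });

cache.retain("Book:1");      // reference count 1: gc treats the entity as a root
console.log(cache.gc());     // [] - still reachable, nothing collected
cache.release("Book:1");     // reference count back to 0
console.log(cache.gc());     // ["Book:1"] - unreachable, so it is removed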

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
//# sourceMappingURL=fixPolyfills.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"fixPolyfills.d.ts","sourceRoot":"","sources":["../../../src/cache/inmemory/fixPolyfills.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1 @@
//# sourceMappingURL=fixPolyfills.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"fixPolyfills.js","sourceRoot":"","sources":["../../../src/cache/inmemory/fixPolyfills.ts"],"names":[],"mappings":"","sourcesContent":["// Most JavaScript environments do not need the workarounds implemented in\n// fixPolyfills.native.ts, so importing fixPolyfills.ts merely imports\n// this empty module, adding nothing to bundle sizes or execution times.\n// When bundling for React Native, we substitute fixPolyfills.native.js\n// for fixPolyfills.js (see the \"react-native\" section of package.json),\n// to work around problems with Map and Set polyfills in older versions of\n// React Native (which should have been fixed in react-native@0.59.0):\n// https://github.com/apollographql/apollo-client/pull/5962\n"]}

View File

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=fixPolyfills.native.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"fixPolyfills.native.d.ts","sourceRoot":"","sources":["../../../src/cache/inmemory/fixPolyfills.native.ts"],"names":[],"mappings":"AAoDA,OAAO,EAAE,CAAA"}

View File

@@ -0,0 +1,48 @@
var testMap = new Map();
if (testMap.set(1, 2) !== testMap) {
var set_1 = testMap.set;
Map.prototype.set = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
set_1.apply(this, args);
return this;
};
}
var testSet = new Set();
if (testSet.add(3) !== testSet) {
var add_1 = testSet.add;
Set.prototype.add = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
add_1.apply(this, args);
return this;
};
}
var frozen = {};
if (typeof Object.freeze === 'function') {
Object.freeze(frozen);
}
try {
testMap.set(frozen, frozen).delete(frozen);
}
catch (_a) {
var wrap = function (method) {
return method && (function (obj) {
try {
testMap.set(obj, obj).delete(obj);
}
finally {
return method.call(Object, obj);
}
});
};
Object.freeze = wrap(Object.freeze);
Object.seal = wrap(Object.seal);
Object.preventExtensions = wrap(Object.preventExtensions);
}
export {};
//# sourceMappingURL=fixPolyfills.native.js.map
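
This module patches older React Native Map/Set polyfills so that set and add return the collection itself, as the ECMAScript spec requires, and it routes freeze/seal/preventExtensions through a Map first so polyfilled Maps can still tag those objects. A tiny sketch of the spec behaviour the patch restores:

// Per spec, Map.prototype.set and Set.prototype.add return the collection,
// which is what makes chaining (and the cache's internal bookkeeping) work.
const m = new Map<string, number>().set("a", 1).set("b", 2);
const s = new Set<number>().add(1).add(2);
console.log(m.get("b"), s.has(2)); // 2 true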

View File

@@ -0,0 +1 @@
{"version":3,"file":"fixPolyfills.native.js","sourceRoot":"","sources":["../../../src/cache/inmemory/fixPolyfills.native.ts"],"names":[],"mappings":"AAEA,IAAM,OAAO,GAAG,IAAI,GAAG,EAAE,CAAC;AAC1B,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,OAAO,EAAE;IACzB,IAAA,KAAG,GAAK,OAAO,IAAZ,CAAa;IACxB,GAAG,CAAC,SAAS,CAAC,GAAG,GAAG;QAAU,cAAO;aAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;YAAP,yBAAO;;QACnC,KAAG,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;QACtB,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;CACH;AAGD,IAAM,OAAO,GAAG,IAAI,GAAG,EAAE,CAAC;AAC1B,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,OAAO,EAAE;IACtB,IAAA,KAAG,GAAK,OAAO,IAAZ,CAAa;IACxB,GAAG,CAAC,SAAS,CAAC,GAAG,GAAG;QAAU,cAAO;aAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;YAAP,yBAAO;;QACnC,KAAG,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;QACtB,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;CACH;AAED,IAAM,MAAM,GAAG,EAAE,CAAC;AAClB,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EAAE;IACvC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;CACvB;AAED,IAAI;IAOF,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;CAC5C;AAAC,WAAM;IACN,IAAM,IAAI,GAAG,UAAC,MAAwB;QACpC,OAAO,MAAM,IAAI,CAAC,UAAA,GAAG;YACnB,IAAI;gBAEF,OAAO,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACnC;oBAAS;gBAGR,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;aACjC;QACH,CAAC,CAAC,CAAC;IACL,CAAC,CAAC;IACF,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IACpC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;IAChC,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,MAAM,CAAC,iBAAiB,CAAC,CAAC;CAC3D","sourcesContent":["// Make sure Map.prototype.set returns the Map instance, per spec.\n// https://github.com/apollographql/apollo-client/issues/4024\nconst testMap = new Map();\nif (testMap.set(1, 2) !== testMap) {\n const { set } = testMap;\n Map.prototype.set = function (...args) {\n set.apply(this, args);\n return this;\n };\n}\n\n// Make sure Set.prototype.add returns the Set instance, per spec.\nconst testSet = new Set();\nif (testSet.add(3) !== testSet) {\n const { add } = testSet;\n Set.prototype.add = function (...args) {\n add.apply(this, args);\n return this;\n };\n}\n\nconst frozen = {};\nif (typeof Object.freeze === 'function') {\n Object.freeze(frozen);\n}\n\ntry {\n // If non-extensible objects can't be stored as keys in a Map, make sure we\n // do not freeze/seal/etc. an object without first attempting to put it in a\n // Map. For example, this gives the React Native Map polyfill a chance to tag\n // objects before they become non-extensible:\n // https://github.com/facebook/react-native/blob/98a6f19d7c/Libraries/vendor/core/Map.js#L44-L50\n // https://github.com/apollographql/react-apollo/issues/2442#issuecomment-426489517\n testMap.set(frozen, frozen).delete(frozen);\n} catch {\n const wrap = (method: <T>(obj: T) => T): typeof method => {\n return method && (obj => {\n try {\n // If .set succeeds, also call .delete to avoid leaking memory.\n testMap.set(obj, obj).delete(obj);\n } finally {\n // If .set or .delete fails, the exception will be silently swallowed\n // by this return-from-finally statement:\n return method.call(Object, obj);\n }\n });\n };\n Object.freeze = wrap(Object.freeze);\n Object.seal = wrap(Object.seal);\n Object.preventExtensions = wrap(Object.preventExtensions);\n}\n\nexport {}\n"]}

View File

@@ -0,0 +1,21 @@
import { SelectionSetNode } from 'graphql';
import { NormalizedCache, InMemoryCacheConfig } from './types';
import { KeyFieldsContext } from './policies';
import { Reference, StoreValue, StoreObject, DeepMerger } from '../../utilities';
export declare const hasOwn: (v: PropertyKey) => boolean;
export declare function defaultDataIdFromObject({ __typename, id, _id }: Readonly<StoreObject>, context?: KeyFieldsContext): string | undefined;
export declare function normalizeConfig(config: InMemoryCacheConfig): {
dataIdFromObject: typeof defaultDataIdFromObject;
addTypename: boolean;
resultCaching: boolean;
canonizeResults: boolean;
} & InMemoryCacheConfig;
export declare function shouldCanonizeResults(config: Pick<InMemoryCacheConfig, "canonizeResults">): boolean;
export declare function getTypenameFromStoreObject(store: NormalizedCache, objectOrReference: StoreObject | Reference): string | undefined;
export declare const TypeOrFieldNameRegExp: RegExp;
export declare function fieldNameFromStoreName(storeFieldName: string): string;
export declare function selectionSetMatchesResult(selectionSet: SelectionSetNode, result: Record<string, any>, variables?: Record<string, any>): boolean;
export declare function storeValueIsStoreObject(value: StoreValue): value is StoreObject;
export declare function makeProcessedFieldsMerger(): DeepMerger<any[]>;
export declare const isArray: (a: any) => a is any[] | readonly any[];
//# sourceMappingURL=helpers.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"helpers.d.ts","sourceRoot":"","sources":["../../../src/cache/inmemory/helpers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAE3C,OAAO,EACL,eAAe,EACf,mBAAmB,EACpB,MAAM,SAAS,CAAC;AAEjB,OAAO,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAE9C,OAAO,EACL,SAAS,EAET,UAAU,EACV,WAAW,EAEX,UAAU,EAKX,MAAM,iBAAiB,CAAC;AAEzB,eAAO,MACW,MAAM,6BACJ,CAAC;AAErB,wBAAgB,uBAAuB,CACrC,EAAE,UAAU,EAAE,EAAE,EAAE,GAAG,EAAE,EAAE,QAAQ,CAAC,WAAW,CAAC,EAC9C,OAAO,CAAC,EAAE,gBAAgB,GACzB,MAAM,GAAG,SAAS,CAiBpB;AAWD,wBAAgB,eAAe,CAAC,MAAM,EAAE,mBAAmB;;;;;wBAE1D;AAED,wBAAgB,qBAAqB,CACnC,MAAM,EAAE,IAAI,CAAC,mBAAmB,EAAE,iBAAiB,CAAC,GACnD,OAAO,CAGT;AAED,wBAAgB,0BAA0B,CACxC,KAAK,EAAE,eAAe,EACtB,iBAAiB,EAAE,WAAW,GAAG,SAAS,GACzC,MAAM,GAAG,SAAS,CAIpB;AAED,eAAO,MAAM,qBAAqB,QAAuB,CAAC;AAE1D,wBAAgB,sBAAsB,CAAC,cAAc,EAAE,MAAM,GAAG,MAAM,CAGrE;AAED,wBAAgB,yBAAyB,CACvC,YAAY,EAAE,gBAAgB,EAC9B,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EAC3B,SAAS,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAC9B,OAAO,CAoBT;AAED,wBAAgB,uBAAuB,CACrC,KAAK,EAAE,UAAU,GAChB,KAAK,IAAI,WAAW,CAItB;AAED,wBAAgB,yBAAyB,sBAExC;AAED,eAAO,MAAM,OAAO,MAAO,GAAG,gCAAkD,CAAA"}

View File

@@ -0,0 +1,68 @@
import { isReference, isField, DeepMerger, resultKeyNameFromField, shouldInclude, isNonNullObject, compact, } from "../../utilities/index.js";
export var hasOwn = Object.prototype.hasOwnProperty;
export function defaultDataIdFromObject(_a, context) {
var __typename = _a.__typename, id = _a.id, _id = _a._id;
if (typeof __typename === "string") {
if (context) {
context.keyObject =
id !== void 0 ? { id: id } :
_id !== void 0 ? { _id: _id } :
void 0;
}
if (id === void 0)
id = _id;
if (id !== void 0) {
return "".concat(__typename, ":").concat((typeof id === "number" ||
typeof id === "string") ? id : JSON.stringify(id));
}
}
}
var defaultConfig = {
dataIdFromObject: defaultDataIdFromObject,
addTypename: true,
resultCaching: true,
canonizeResults: false,
};
export function normalizeConfig(config) {
return compact(defaultConfig, config);
}
export function shouldCanonizeResults(config) {
var value = config.canonizeResults;
return value === void 0 ? defaultConfig.canonizeResults : value;
}
export function getTypenameFromStoreObject(store, objectOrReference) {
return isReference(objectOrReference)
? store.get(objectOrReference.__ref, "__typename")
: objectOrReference && objectOrReference.__typename;
}
export var TypeOrFieldNameRegExp = /^[_a-z][_0-9a-z]*/i;
export function fieldNameFromStoreName(storeFieldName) {
var match = storeFieldName.match(TypeOrFieldNameRegExp);
return match ? match[0] : storeFieldName;
}
export function selectionSetMatchesResult(selectionSet, result, variables) {
if (isNonNullObject(result)) {
return isArray(result)
? result.every(function (item) { return selectionSetMatchesResult(selectionSet, item, variables); })
: selectionSet.selections.every(function (field) {
if (isField(field) && shouldInclude(field, variables)) {
var key = resultKeyNameFromField(field);
return hasOwn.call(result, key) &&
(!field.selectionSet ||
selectionSetMatchesResult(field.selectionSet, result[key], variables));
}
return true;
});
}
return false;
}
export function storeValueIsStoreObject(value) {
return isNonNullObject(value) &&
!isReference(value) &&
!isArray(value);
}
export function makeProcessedFieldsMerger() {
return new DeepMerger;
}
export var isArray = function (a) { return Array.isArray(a); };
//# sourceMappingURL=helpers.js.map
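
A few hedged examples of what the exported helpers above return for made-up inputs, assuming they are imported from the @apollo/client/cache entry point shown earlier:

import { defaultDataIdFromObject, fieldNameFromStoreName } from "@apollo/client/cache";

// __typename plus id (or _id) becomes the normalized cache key.
defaultDataIdFromObject({ __typename: "Book", id: 1 });        // "Book:1"
defaultDataIdFromObject({ __typename: "Author", _id: "a1" });  // "Author:a1"
defaultDataIdFromObject({ __typename: "Draft" });              // undefined (no id or _id)

// Store field names carry serialized arguments after the field name;
// the regex above strips them back to the plain field name.
fieldNameFromStoreName('comments:{"first":10}');               // "comments"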

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,50 @@
import './fixPolyfills';
import { DocumentNode } from 'graphql';
import { ApolloCache } from '../core/cache';
import { Cache } from '../core/types/Cache';
import { StoreObject, Reference } from '../../utilities';
import { InMemoryCacheConfig, NormalizedCacheObject } from './types';
import { makeVar } from './reactiveVars';
import { Policies } from './policies';
declare type BroadcastOptions = Pick<Cache.BatchOptions<InMemoryCache>, "optimistic" | "onWatchUpdated">;
export declare class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
private data;
private optimisticData;
protected config: InMemoryCacheConfig;
private watches;
private addTypename;
private typenameDocumentCache;
private storeReader;
private storeWriter;
private maybeBroadcastWatch;
readonly policies: Policies;
readonly makeVar: typeof makeVar;
constructor(config?: InMemoryCacheConfig);
private init;
private resetResultCache;
restore(data: NormalizedCacheObject): this;
extract(optimistic?: boolean): NormalizedCacheObject;
read<T>(options: Cache.ReadOptions): T | null;
write(options: Cache.WriteOptions): Reference | undefined;
modify(options: Cache.ModifyOptions): boolean;
diff<TData, TVariables = any>(options: Cache.DiffOptions<TData, TVariables>): Cache.DiffResult<TData>;
watch<TData = any, TVariables = any>(watch: Cache.WatchOptions<TData, TVariables>): () => void;
gc(options?: {
resetResultCache?: boolean;
resetResultIdentities?: boolean;
}): string[];
retain(rootId: string, optimistic?: boolean): number;
release(rootId: string, optimistic?: boolean): number;
identify(object: StoreObject | Reference): string | undefined;
evict(options: Cache.EvictOptions): boolean;
reset(options?: Cache.ResetOptions): Promise<void>;
removeOptimistic(idToRemove: string): void;
private txCount;
batch<TUpdateResult>(options: Cache.BatchOptions<InMemoryCache, TUpdateResult>): TUpdateResult;
performTransaction(update: (cache: InMemoryCache) => any, optimisticId?: string | null): any;
transformDocument(document: DocumentNode): DocumentNode;
protected broadcastWatches(options?: BroadcastOptions): void;
private broadcastWatch;
}
export {};
//# sourceMappingURL=inMemoryCache.d.ts.map
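
A hedged sketch of the watch API declared above; the query is invented, and the returned function unsubscribes the watcher.

import { gql } from "@apollo/client";
import { InMemoryCache } from "@apollo/client/cache";

const cache = new InMemoryCache();
const query = gql`query GetBook { book(id: 1) { title } }`;   // hypothetical query

const unsubscribe = cache.watch({
  query,
  optimistic: true,
  immediate: true,            // broadcast the current diff right away
  callback(diff) {
    // diff.result holds whatever the cache can currently satisfy;
    // diff.complete is false while fields are still missing.
    console.log(diff.complete, diff.result);
  },
});

// later: stop receiving broadcasts for this watcher
unsubscribe();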

View File

@@ -0,0 +1 @@
{"version":3,"file":"inMemoryCache.d.ts","sourceRoot":"","sources":["../../../src/cache/inmemory/inMemoryCache.ts"],"names":[],"mappings":"AAGA,OAAO,gBAAgB,CAAC;AAExB,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAIvC,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAE5C,OAAO,EAEL,WAAW,EACX,SAAS,EAEV,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,mBAAmB,EAAE,qBAAqB,EAAE,MAAM,SAAS,CAAC;AAIrE,OAAO,EAAE,OAAO,EAA4B,MAAM,gBAAgB,CAAC;AACnE,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAItC,aAAK,gBAAgB,GAAG,IAAI,CAC1B,KAAK,CAAC,YAAY,CAAC,aAAa,CAAC,EAC/B,YAAY,GACZ,gBAAgB,CACnB,CAAA;AAED,qBAAa,aAAc,SAAQ,WAAW,CAAC,qBAAqB,CAAC;IACnE,OAAO,CAAC,IAAI,CAAc;IAC1B,OAAO,CAAC,cAAc,CAAc;IAEpC,SAAS,CAAC,MAAM,EAAE,mBAAmB,CAAC;IACtC,OAAO,CAAC,OAAO,CAAiC;IAChD,OAAO,CAAC,WAAW,CAAU;IAE7B,OAAO,CAAC,qBAAqB,CAAyC;IACtE,OAAO,CAAC,WAAW,CAAc;IACjC,OAAO,CAAC,WAAW,CAAc;IAEjC,OAAO,CAAC,mBAAmB,CAGH;IAKxB,SAAgB,QAAQ,EAAE,QAAQ,CAAC;IAEnC,SAAgB,OAAO,iBAAW;gBAEtB,MAAM,GAAE,mBAAwB;IAe5C,OAAO,CAAC,IAAI;IAmBZ,OAAO,CAAC,gBAAgB;IAwDjB,OAAO,CAAC,IAAI,EAAE,qBAAqB,GAAG,IAAI;IAS1C,OAAO,CAAC,UAAU,GAAE,OAAe,GAAG,qBAAqB;IAI3D,IAAI,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,WAAW,GAAG,CAAC,GAAG,IAAI;IA+B7C,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,YAAY,GAAG,SAAS,GAAG,SAAS;IAWzD,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,aAAa,GAAG,OAAO;IA0B7C,IAAI,CAAC,KAAK,EAAE,UAAU,GAAG,GAAG,EACjC,OAAO,EAAE,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE,UAAU,CAAC,GAC5C,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC;IASnB,KAAK,CAAC,KAAK,GAAG,GAAG,EAAE,UAAU,GAAG,GAAG,EACxC,KAAK,EAAE,KAAK,CAAC,YAAY,CAAC,KAAK,EAAE,UAAU,CAAC,GAC3C,MAAM,IAAI;IAgCN,EAAE,CAAC,OAAO,CAAC,EAAE;QAGlB,gBAAgB,CAAC,EAAE,OAAO,CAAC;QAI3B,qBAAqB,CAAC,EAAE,OAAO,CAAC;KACjC;IAoBM,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,OAAO,GAAG,MAAM;IASpD,OAAO,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,OAAO,GAAG,MAAM;IAUrD,QAAQ,CAAC,MAAM,EAAE,WAAW,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS;IAS7D,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,YAAY,GAAG,OAAO;IA0B3C,KAAK,CAAC,OAAO,CAAC,EAAE,KAAK,CAAC,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;IAwBlD,gBAAgB,CAAC,UAAU,EAAE,MAAM;IAQ1C,OAAO,CAAC,OAAO,CAAK;IAEb,KAAK,CAAC,aAAa,EACxB,OAAO,EAAE,KAAK,CAAC,YAAY,CAAC,aAAa,EAAE,aAAa,CAAC,GACxD,aAAa;IAmGT,kBAAkB,CACvB,MAAM,EAAE,CAAC,KAAK,EAAE,aAAa,KAAK,GAAG,EACrC,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI;IAQvB,iBAAiB,CAAC,QAAQ,EAAE,YAAY,GAAG,YAAY;IAgB9D,SAAS,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,gBAAgB;IAYrD,OAAO,CAAC,cAAc;CAgCvB"}

View File

@@ -0,0 +1,308 @@
import { __assign, __extends } from "tslib";
import { invariant } from "../../utilities/globals/index.js";
import "./fixPolyfills.js";
import { wrap } from 'optimism';
import { equal } from '@wry/equality';
import { ApolloCache } from "../core/cache.js";
import { MissingFieldError } from "../core/types/common.js";
import { addTypenameToDocument, isReference, } from "../../utilities/index.js";
import { StoreReader } from "./readFromStore.js";
import { StoreWriter } from "./writeToStore.js";
import { EntityStore, supportsResultCaching } from "./entityStore.js";
import { makeVar, forgetCache, recallCache } from "./reactiveVars.js";
import { Policies } from "./policies.js";
import { hasOwn, normalizeConfig, shouldCanonizeResults } from "./helpers.js";
import { canonicalStringify } from "./object-canon.js";
var InMemoryCache = (function (_super) {
__extends(InMemoryCache, _super);
function InMemoryCache(config) {
if (config === void 0) { config = {}; }
var _this = _super.call(this) || this;
_this.watches = new Set();
_this.typenameDocumentCache = new Map();
_this.makeVar = makeVar;
_this.txCount = 0;
_this.config = normalizeConfig(config);
_this.addTypename = !!_this.config.addTypename;
_this.policies = new Policies({
cache: _this,
dataIdFromObject: _this.config.dataIdFromObject,
possibleTypes: _this.config.possibleTypes,
typePolicies: _this.config.typePolicies,
});
_this.init();
return _this;
}
InMemoryCache.prototype.init = function () {
var rootStore = this.data = new EntityStore.Root({
policies: this.policies,
resultCaching: this.config.resultCaching,
});
this.optimisticData = rootStore.stump;
this.resetResultCache();
};
InMemoryCache.prototype.resetResultCache = function (resetResultIdentities) {
var _this = this;
var previousReader = this.storeReader;
this.storeWriter = new StoreWriter(this, this.storeReader = new StoreReader({
cache: this,
addTypename: this.addTypename,
resultCacheMaxSize: this.config.resultCacheMaxSize,
canonizeResults: shouldCanonizeResults(this.config),
canon: resetResultIdentities
? void 0
: previousReader && previousReader.canon,
}));
this.maybeBroadcastWatch = wrap(function (c, options) {
return _this.broadcastWatch(c, options);
}, {
max: this.config.resultCacheMaxSize,
makeCacheKey: function (c) {
var store = c.optimistic ? _this.optimisticData : _this.data;
if (supportsResultCaching(store)) {
var optimistic = c.optimistic, rootId = c.rootId, variables = c.variables;
return store.makeCacheKey(c.query, c.callback, canonicalStringify({ optimistic: optimistic, rootId: rootId, variables: variables }));
}
}
});
new Set([
this.data.group,
this.optimisticData.group,
]).forEach(function (group) { return group.resetCaching(); });
};
InMemoryCache.prototype.restore = function (data) {
this.init();
if (data)
this.data.replace(data);
return this;
};
InMemoryCache.prototype.extract = function (optimistic) {
if (optimistic === void 0) { optimistic = false; }
return (optimistic ? this.optimisticData : this.data).extract();
};
InMemoryCache.prototype.read = function (options) {
var _a = options.returnPartialData, returnPartialData = _a === void 0 ? false : _a;
try {
return this.storeReader.diffQueryAgainstStore(__assign(__assign({}, options), { store: options.optimistic ? this.optimisticData : this.data, config: this.config, returnPartialData: returnPartialData })).result || null;
}
catch (e) {
if (e instanceof MissingFieldError) {
return null;
}
throw e;
}
};
InMemoryCache.prototype.write = function (options) {
try {
++this.txCount;
return this.storeWriter.writeToStore(this.data, options);
}
finally {
if (!--this.txCount && options.broadcast !== false) {
this.broadcastWatches();
}
}
};
InMemoryCache.prototype.modify = function (options) {
if (hasOwn.call(options, "id") && !options.id) {
return false;
}
var store = options.optimistic
? this.optimisticData
: this.data;
try {
++this.txCount;
return store.modify(options.id || "ROOT_QUERY", options.fields);
}
finally {
if (!--this.txCount && options.broadcast !== false) {
this.broadcastWatches();
}
}
};
InMemoryCache.prototype.diff = function (options) {
return this.storeReader.diffQueryAgainstStore(__assign(__assign({}, options), { store: options.optimistic ? this.optimisticData : this.data, rootId: options.id || "ROOT_QUERY", config: this.config }));
};
InMemoryCache.prototype.watch = function (watch) {
var _this = this;
if (!this.watches.size) {
recallCache(this);
}
this.watches.add(watch);
if (watch.immediate) {
this.maybeBroadcastWatch(watch);
}
return function () {
if (_this.watches.delete(watch) && !_this.watches.size) {
forgetCache(_this);
}
_this.maybeBroadcastWatch.forget(watch);
};
};
InMemoryCache.prototype.gc = function (options) {
canonicalStringify.reset();
var ids = this.optimisticData.gc();
if (options && !this.txCount) {
if (options.resetResultCache) {
this.resetResultCache(options.resetResultIdentities);
}
else if (options.resetResultIdentities) {
this.storeReader.resetCanon();
}
}
return ids;
};
InMemoryCache.prototype.retain = function (rootId, optimistic) {
return (optimistic ? this.optimisticData : this.data).retain(rootId);
};
InMemoryCache.prototype.release = function (rootId, optimistic) {
return (optimistic ? this.optimisticData : this.data).release(rootId);
};
InMemoryCache.prototype.identify = function (object) {
if (isReference(object))
return object.__ref;
try {
return this.policies.identify(object)[0];
}
catch (e) {
__DEV__ && invariant.warn(e);
}
};
InMemoryCache.prototype.evict = function (options) {
if (!options.id) {
if (hasOwn.call(options, "id")) {
return false;
}
options = __assign(__assign({}, options), { id: "ROOT_QUERY" });
}
try {
++this.txCount;
return this.optimisticData.evict(options, this.data);
}
finally {
if (!--this.txCount && options.broadcast !== false) {
this.broadcastWatches();
}
}
};
InMemoryCache.prototype.reset = function (options) {
var _this = this;
this.init();
canonicalStringify.reset();
if (options && options.discardWatches) {
this.watches.forEach(function (watch) { return _this.maybeBroadcastWatch.forget(watch); });
this.watches.clear();
forgetCache(this);
}
else {
this.broadcastWatches();
}
return Promise.resolve();
};
InMemoryCache.prototype.removeOptimistic = function (idToRemove) {
var newOptimisticData = this.optimisticData.removeLayer(idToRemove);
if (newOptimisticData !== this.optimisticData) {
this.optimisticData = newOptimisticData;
this.broadcastWatches();
}
};
InMemoryCache.prototype.batch = function (options) {
var _this = this;
var update = options.update, _a = options.optimistic, optimistic = _a === void 0 ? true : _a, removeOptimistic = options.removeOptimistic, onWatchUpdated = options.onWatchUpdated;
var updateResult;
var perform = function (layer) {
var _a = _this, data = _a.data, optimisticData = _a.optimisticData;
++_this.txCount;
if (layer) {
_this.data = _this.optimisticData = layer;
}
try {
return updateResult = update(_this);
}
finally {
--_this.txCount;
_this.data = data;
_this.optimisticData = optimisticData;
}
};
var alreadyDirty = new Set();
if (onWatchUpdated && !this.txCount) {
this.broadcastWatches(__assign(__assign({}, options), { onWatchUpdated: function (watch) {
alreadyDirty.add(watch);
return false;
} }));
}
if (typeof optimistic === 'string') {
this.optimisticData = this.optimisticData.addLayer(optimistic, perform);
}
else if (optimistic === false) {
perform(this.data);
}
else {
perform();
}
if (typeof removeOptimistic === "string") {
this.optimisticData = this.optimisticData.removeLayer(removeOptimistic);
}
if (onWatchUpdated && alreadyDirty.size) {
this.broadcastWatches(__assign(__assign({}, options), { onWatchUpdated: function (watch, diff) {
var result = onWatchUpdated.call(this, watch, diff);
if (result !== false) {
alreadyDirty.delete(watch);
}
return result;
} }));
if (alreadyDirty.size) {
alreadyDirty.forEach(function (watch) { return _this.maybeBroadcastWatch.dirty(watch); });
}
}
else {
this.broadcastWatches(options);
}
return updateResult;
};
InMemoryCache.prototype.performTransaction = function (update, optimisticId) {
return this.batch({
update: update,
optimistic: optimisticId || (optimisticId !== null),
});
};
InMemoryCache.prototype.transformDocument = function (document) {
if (this.addTypename) {
var result = this.typenameDocumentCache.get(document);
if (!result) {
result = addTypenameToDocument(document);
this.typenameDocumentCache.set(document, result);
this.typenameDocumentCache.set(result, result);
}
return result;
}
return document;
};
InMemoryCache.prototype.broadcastWatches = function (options) {
var _this = this;
if (!this.txCount) {
this.watches.forEach(function (c) { return _this.maybeBroadcastWatch(c, options); });
}
};
InMemoryCache.prototype.broadcastWatch = function (c, options) {
var lastDiff = c.lastDiff;
var diff = this.diff(c);
if (options) {
if (c.optimistic &&
typeof options.optimistic === "string") {
diff.fromOptimisticTransaction = true;
}
if (options.onWatchUpdated &&
options.onWatchUpdated.call(this, c, diff, lastDiff) === false) {
return;
}
}
if (!lastDiff || !equal(lastDiff.result, diff.result)) {
c.callback(c.lastDiff = diff, lastDiff);
}
};
return InMemoryCache;
}(ApolloCache));
export { InMemoryCache };
//# sourceMappingURL=inMemoryCache.js.map
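
To illustrate how the modify and evict methods defined above are typically driven from application code, a hedged sketch with an invented Book entity; DELETE comes from the modifier details object declared in common.ts.

import { InMemoryCache } from "@apollo/client/cache";

const cache = new InMemoryCache();
cache.restore({
  "Book:1": { __typename: "Book", title: "1984", votes: 2 },   // made-up seed data
});

cache.modify({
  id: cache.identify({ __typename: "Book", id: 1 }),  // "Book:1" under the default key function
  fields: {
    votes: (existing) => existing + 1,                 // rewrite a field in place
    title: (_value, { DELETE }) => DELETE,             // drop the field entirely
  },
});

cache.evict({ id: "Book:1", fieldName: "votes" });     // omit fieldName to evict the whole entity
cache.gc();                                            // collect anything left unreachable
console.log(cache.extract());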

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,9 @@
import { KeySpecifier, KeyFieldsFunction, KeyArgsFunction } from "./policies";
export declare function keyFieldsFnFromSpecifier(specifier: KeySpecifier): KeyFieldsFunction;
export declare function keyArgsFnFromSpecifier(specifier: KeySpecifier): KeyArgsFunction;
export declare function collectSpecifierPaths(specifier: KeySpecifier, extractor: (path: string[]) => any): Record<string, any>;
export declare function getSpecifierPaths(spec: KeySpecifier): string[][];
declare function extractKey<TObj extends Record<string, any>, TKey extends string>(object: TObj, key: TKey): TObj[TKey] | undefined;
export declare function extractKeyPath(object: Record<string, any>, path: string[], extract?: typeof extractKey): any;
export {};
//# sourceMappingURL=key-extractor.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"key-extractor.d.ts","sourceRoot":"","sources":["../../../src/cache/inmemory/key-extractor.ts"],"names":[],"mappings":"AAUA,OAAO,EACL,YAAY,EACZ,iBAAiB,EACjB,eAAe,EAChB,MAAM,YAAY,CAAC;AAkBpB,wBAAgB,wBAAwB,CACtC,SAAS,EAAE,YAAY,GACtB,iBAAiB,CAoDnB;AASD,wBAAgB,sBAAsB,CAAC,SAAS,EAAE,YAAY,GAAG,eAAe,CAyE/E;AAED,wBAAgB,qBAAqB,CACnC,SAAS,EAAE,YAAY,EACvB,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAG,GACjC,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAgBrB;AAED,wBAAgB,iBAAiB,CAAC,IAAI,EAAE,YAAY,GAAG,MAAM,EAAE,EAAE,CAsBhE;AAED,iBAAS,UAAU,CACjB,IAAI,SAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EAChC,IAAI,SAAS,MAAM,EACnB,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,SAAS,CAEjD;AAED,wBAAgB,cAAc,CAC5B,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EAC3B,IAAI,EAAE,MAAM,EAAE,EACd,OAAO,CAAC,EAAE,OAAO,UAAU,GAC1B,GAAG,CAkBL"}

View File

@@ -0,0 +1,118 @@
import { invariant } from "../../utilities/globals/index.js";
import { argumentsObjectFromField, DeepMerger, isNonEmptyArray, isNonNullObject, } from "../../utilities/index.js";
import { hasOwn, isArray } from "./helpers.js";
var specifierInfoCache = Object.create(null);
function lookupSpecifierInfo(spec) {
var cacheKey = JSON.stringify(spec);
return specifierInfoCache[cacheKey] ||
(specifierInfoCache[cacheKey] = Object.create(null));
}
export function keyFieldsFnFromSpecifier(specifier) {
var info = lookupSpecifierInfo(specifier);
return info.keyFieldsFn || (info.keyFieldsFn = function (object, context) {
var extract = function (from, key) { return context.readField(key, from); };
var keyObject = context.keyObject = collectSpecifierPaths(specifier, function (schemaKeyPath) {
var extracted = extractKeyPath(context.storeObject, schemaKeyPath, extract);
if (extracted === void 0 &&
object !== context.storeObject &&
hasOwn.call(object, schemaKeyPath[0])) {
extracted = extractKeyPath(object, schemaKeyPath, extractKey);
}
__DEV__ ? invariant(extracted !== void 0, "Missing field '".concat(schemaKeyPath.join('.'), "' while extracting keyFields from ").concat(JSON.stringify(object))) : invariant(extracted !== void 0, 2);
return extracted;
});
return "".concat(context.typename, ":").concat(JSON.stringify(keyObject));
});
}
export function keyArgsFnFromSpecifier(specifier) {
var info = lookupSpecifierInfo(specifier);
return info.keyArgsFn || (info.keyArgsFn = function (args, _a) {
var field = _a.field, variables = _a.variables, fieldName = _a.fieldName;
var collected = collectSpecifierPaths(specifier, function (keyPath) {
var firstKey = keyPath[0];
var firstChar = firstKey.charAt(0);
if (firstChar === "@") {
if (field && isNonEmptyArray(field.directives)) {
var directiveName_1 = firstKey.slice(1);
var d = field.directives.find(function (d) { return d.name.value === directiveName_1; });
var directiveArgs = d && argumentsObjectFromField(d, variables);
return directiveArgs && extractKeyPath(directiveArgs, keyPath.slice(1));
}
return;
}
if (firstChar === "$") {
var variableName = firstKey.slice(1);
if (variables && hasOwn.call(variables, variableName)) {
var varKeyPath = keyPath.slice(0);
varKeyPath[0] = variableName;
return extractKeyPath(variables, varKeyPath);
}
return;
}
if (args) {
return extractKeyPath(args, keyPath);
}
});
var suffix = JSON.stringify(collected);
if (args || suffix !== "{}") {
fieldName += ":" + suffix;
}
return fieldName;
});
}
export function collectSpecifierPaths(specifier, extractor) {
var merger = new DeepMerger;
return getSpecifierPaths(specifier).reduce(function (collected, path) {
var _a;
var toMerge = extractor(path);
if (toMerge !== void 0) {
for (var i = path.length - 1; i >= 0; --i) {
toMerge = (_a = {}, _a[path[i]] = toMerge, _a);
}
collected = merger.merge(collected, toMerge);
}
return collected;
}, Object.create(null));
}
export function getSpecifierPaths(spec) {
var info = lookupSpecifierInfo(spec);
if (!info.paths) {
var paths_1 = info.paths = [];
var currentPath_1 = [];
spec.forEach(function (s, i) {
if (isArray(s)) {
getSpecifierPaths(s).forEach(function (p) { return paths_1.push(currentPath_1.concat(p)); });
currentPath_1.length = 0;
}
else {
currentPath_1.push(s);
if (!isArray(spec[i + 1])) {
paths_1.push(currentPath_1.slice(0));
currentPath_1.length = 0;
}
}
});
}
return info.paths;
}
function extractKey(object, key) {
return object[key];
}
export function extractKeyPath(object, path, extract) {
extract = extract || extractKey;
return normalize(path.reduce(function reducer(obj, key) {
return isArray(obj)
? obj.map(function (child) { return reducer(child, key); })
: obj && extract(obj, key);
}, object));
}
function normalize(value) {
if (isNonNullObject(value)) {
if (isArray(value)) {
return value.map(normalize);
}
return collectSpecifierPaths(Object.keys(value).sort(), function (path) { return extractKeyPath(value, path); });
}
return value;
}
//# sourceMappingURL=key-extractor.js.map
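
The specifier machinery above is what powers keyFields/keyArgs arrays in type policies: a nested array selects sub-fields of the preceding key. A hedged sketch of the path expansion and extraction for an invented specifier; these functions are internal, so the relative import is purely illustrative rather than a supported entry point.

import { getSpecifierPaths, extractKeyPath, collectSpecifierPaths } from "./key-extractor.js";

const specifier = ["title", "author", ["name"]];   // a typical keyFields value
const book = { title: "1984", author: { name: "George Orwell" } };

getSpecifierPaths(specifier);
// => [["title"], ["author", "name"]]

extractKeyPath(book, ["author", "name"]);
// => "George Orwell"

collectSpecifierPaths(specifier, (path) => extractKeyPath(book, path));
// => { title: "1984", author: { name: "George Orwell" } }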

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,18 @@
import "../../utilities/globals";
export declare class ObjectCanon {
private known;
private pool;
isKnown(value: any): boolean;
private passes;
pass<T>(value: T): T;
admit<T>(value: T): T;
private sortedKeys;
private keysByJSON;
readonly empty: {};
}
export declare const canonicalStringify: ((value: any) => string) & {
reset: typeof resetCanonicalStringify;
};
declare function resetCanonicalStringify(): void;
export {};
//# sourceMappingURL=object-canon.d.ts.map
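
canonicalStringify, declared above, produces a stable JSON string in which object keys are sorted, so objects that differ only in key order map to the same string. A small hedged sketch, assuming the @apollo/client/cache re-export shown earlier:

import { canonicalStringify } from "@apollo/client/cache";

const a = canonicalStringify({ b: 2, a: 1 });
const b = canonicalStringify({ a: 1, b: 2 });

console.log(a);        // '{"a":1,"b":2}' - keys are sorted before stringifying
console.log(a === b);  // true: both objects canonicalize to the same string

// The cache relies on this to build stable cache keys from argument objects,
// regardless of the order in which the arguments were written.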

View File

@@ -0,0 +1 @@
{"version":3,"file":"object-canon.d.ts","sourceRoot":"","sources":["../../../src/cache/inmemory/object-canon.ts"],"names":[],"mappings":"AAAA,OAAO,yBAAyB,CAAC;AA0EjC,qBAAa,WAAW;IAGtB,OAAO,CAAC,KAAK,CAAiD;IAG9D,OAAO,CAAC,IAAI,CAIM;IAEX,OAAO,CAAC,KAAK,EAAE,GAAG,GAAG,OAAO;IAMnC,OAAO,CAAC,MAAM,CAAiC;IACxC,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,CAAC;IAWpB,KAAK,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,CAAC;IAuE5B,OAAO,CAAC,UAAU;IAclB,OAAO,CAAC,UAAU,CAAqC;IAGvD,SAAgB,KAAK,KAAkB;CACxC;AAUD,eAAO,MAAM,kBAAkB,WAAkC,GAAG,KAAG,MAAM;;CAkB3E,CAAC;AAMH,iBAAS,uBAAuB,SAG/B"}

View File

@@ -0,0 +1,120 @@
import { __assign } from "tslib";
import "../../utilities/globals/index.js";
import { Trie } from "@wry/trie";
import { canUseWeakMap, canUseWeakSet, isNonNullObject as isObjectOrArray, } from "../../utilities/index.js";
import { isArray } from "./helpers.js";
function shallowCopy(value) {
if (isObjectOrArray(value)) {
return isArray(value)
? value.slice(0)
: __assign({ __proto__: Object.getPrototypeOf(value) }, value);
}
return value;
}
var ObjectCanon = (function () {
function ObjectCanon() {
this.known = new (canUseWeakSet ? WeakSet : Set)();
this.pool = new Trie(canUseWeakMap);
this.passes = new WeakMap();
this.keysByJSON = new Map();
this.empty = this.admit({});
}
ObjectCanon.prototype.isKnown = function (value) {
return isObjectOrArray(value) && this.known.has(value);
};
ObjectCanon.prototype.pass = function (value) {
if (isObjectOrArray(value)) {
var copy = shallowCopy(value);
this.passes.set(copy, value);
return copy;
}
return value;
};
ObjectCanon.prototype.admit = function (value) {
var _this = this;
if (isObjectOrArray(value)) {
var original = this.passes.get(value);
if (original)
return original;
var proto = Object.getPrototypeOf(value);
switch (proto) {
case Array.prototype: {
if (this.known.has(value))
return value;
var array = value.map(this.admit, this);
var node = this.pool.lookupArray(array);
if (!node.array) {
this.known.add(node.array = array);
if (__DEV__) {
Object.freeze(array);
}
}
return node.array;
}
case null:
case Object.prototype: {
if (this.known.has(value))
return value;
var proto_1 = Object.getPrototypeOf(value);
var array_1 = [proto_1];
var keys = this.sortedKeys(value);
array_1.push(keys.json);
var firstValueIndex_1 = array_1.length;
keys.sorted.forEach(function (key) {
array_1.push(_this.admit(value[key]));
});
var node = this.pool.lookupArray(array_1);
if (!node.object) {
var obj_1 = node.object = Object.create(proto_1);
this.known.add(obj_1);
keys.sorted.forEach(function (key, i) {
obj_1[key] = array_1[firstValueIndex_1 + i];
});
if (__DEV__) {
Object.freeze(obj_1);
}
}
return node.object;
}
}
}
return value;
};
ObjectCanon.prototype.sortedKeys = function (obj) {
var keys = Object.keys(obj);
var node = this.pool.lookupArray(keys);
if (!node.keys) {
keys.sort();
var json = JSON.stringify(keys);
if (!(node.keys = this.keysByJSON.get(json))) {
this.keysByJSON.set(json, node.keys = { sorted: keys, json: json });
}
}
return node.keys;
};
return ObjectCanon;
}());
export { ObjectCanon };
export var canonicalStringify = Object.assign(function (value) {
if (isObjectOrArray(value)) {
if (stringifyCanon === void 0) {
resetCanonicalStringify();
}
var canonical = stringifyCanon.admit(value);
var json = stringifyCache.get(canonical);
if (json === void 0) {
stringifyCache.set(canonical, json = JSON.stringify(canonical));
}
return json;
}
return JSON.stringify(value);
}, {
reset: resetCanonicalStringify,
});
var stringifyCanon;
var stringifyCache;
function resetCanonicalStringify() {
stringifyCanon = new ObjectCanon;
stringifyCache = new (canUseWeakMap ? WeakMap : Map)();
}
//# sourceMappingURL=object-canon.js.map
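
The ObjectCanon class above interns structurally equal objects so they can be compared by reference, and canonicalStringify uses it to serialize objects independently of key insertion order, with caching. Below is a minimal, self-contained sketch of the key-order-independent idea only, not the library's implementation:

// Illustration: stable stringify by recursively sorting object keys.
// Apollo's canonicalStringify additionally caches results per canonical object.
function stableStringify(value: unknown): string {
  if (Array.isArray(value)) {
    return "[" + value.map(stableStringify).join(",") + "]";
  }
  if (value !== null && typeof value === "object") {
    const obj = value as Record<string, unknown>;
    const keys = Object.keys(obj).sort();
    return "{" + keys.map((k) => JSON.stringify(k) + ":" + stableStringify(obj[k])).join(",") + "}";
  }
  return JSON.stringify(value);
}

// stableStringify({ b: 1, a: 2 }) and stableStringify({ a: 2, b: 1 }) yield the same string.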

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,96 @@
import { InlineFragmentNode, FragmentDefinitionNode, SelectionSetNode, FieldNode } from 'graphql';
import { FragmentMap, StoreValue, StoreObject, Reference, isReference } from '../../utilities';
import { IdGetter, MergeInfo, ReadMergeModifyContext } from "./types";
import { InMemoryCache } from './inMemoryCache';
import { SafeReadonly, FieldSpecifier, ToReferenceFunction, ReadFieldFunction, ReadFieldOptions, CanReadFunction } from '../core/types/common';
import { WriteContext } from './writeToStore';
export declare type TypePolicies = {
[__typename: string]: TypePolicy;
};
export declare type KeySpecifier = ReadonlyArray<string | KeySpecifier>;
export declare type KeyFieldsContext = {
typename: string | undefined;
storeObject: StoreObject;
readField: ReadFieldFunction;
selectionSet?: SelectionSetNode;
fragmentMap?: FragmentMap;
keyObject?: Record<string, any>;
};
export declare type KeyFieldsFunction = (object: Readonly<StoreObject>, context: KeyFieldsContext) => KeySpecifier | false | ReturnType<IdGetter>;
export declare type TypePolicy = {
keyFields?: KeySpecifier | KeyFieldsFunction | false;
merge?: FieldMergeFunction | boolean;
queryType?: true;
mutationType?: true;
subscriptionType?: true;
fields?: {
[fieldName: string]: FieldPolicy<any> | FieldReadFunction<any>;
};
};
export declare type KeyArgsFunction = (args: Record<string, any> | null, context: {
typename: string;
fieldName: string;
field: FieldNode | null;
variables?: Record<string, any>;
}) => KeySpecifier | false | ReturnType<IdGetter>;
export declare type FieldPolicy<TExisting = any, TIncoming = TExisting, TReadResult = TIncoming, TOptions extends FieldFunctionOptions = FieldFunctionOptions> = {
keyArgs?: KeySpecifier | KeyArgsFunction | false;
read?: FieldReadFunction<TExisting, TReadResult, TOptions>;
merge?: FieldMergeFunction<TExisting, TIncoming, TOptions> | boolean;
};
export declare type StorageType = Record<string, any>;
export interface FieldFunctionOptions<TArgs = Record<string, any>, TVars = Record<string, any>> {
args: TArgs | null;
fieldName: string;
storeFieldName: string;
field: FieldNode | null;
variables?: TVars;
isReference: typeof isReference;
toReference: ToReferenceFunction;
storage: StorageType;
cache: InMemoryCache;
readField: ReadFieldFunction;
canRead: CanReadFunction;
mergeObjects: MergeObjectsFunction;
}
declare type MergeObjectsFunction = <T extends StoreObject | Reference>(existing: T, incoming: T) => T;
export declare type FieldReadFunction<TExisting = any, TReadResult = TExisting, TOptions extends FieldFunctionOptions = FieldFunctionOptions> = (existing: SafeReadonly<TExisting> | undefined, options: TOptions) => TReadResult | undefined;
export declare type FieldMergeFunction<TExisting = any, TIncoming = TExisting, TOptions extends FieldFunctionOptions = FieldFunctionOptions> = (existing: SafeReadonly<TExisting> | undefined, incoming: SafeReadonly<TIncoming>, options: TOptions) => SafeReadonly<TExisting>;
export declare type PossibleTypesMap = {
[supertype: string]: string[];
};
export declare class Policies {
private config;
private typePolicies;
private toBeAdded;
private supertypeMap;
private fuzzySubtypes;
readonly cache: InMemoryCache;
readonly rootIdsByTypename: Record<string, string>;
readonly rootTypenamesById: Record<string, string>;
readonly usingPossibleTypes = false;
constructor(config: {
cache: InMemoryCache;
dataIdFromObject?: KeyFieldsFunction;
possibleTypes?: PossibleTypesMap;
typePolicies?: TypePolicies;
});
identify(object: StoreObject, partialContext?: Partial<KeyFieldsContext>): [string?, StoreObject?];
addTypePolicies(typePolicies: TypePolicies): void;
private updateTypePolicy;
private setRootTypename;
addPossibleTypes(possibleTypes: PossibleTypesMap): void;
private getTypePolicy;
private getFieldPolicy;
private getSupertypeSet;
fragmentMatches(fragment: InlineFragmentNode | FragmentDefinitionNode, typename: string | undefined, result?: Record<string, any>, variables?: Record<string, any>): boolean;
hasKeyArgs(typename: string | undefined, fieldName: string): boolean;
getStoreFieldName(fieldSpec: FieldSpecifier): string;
readField<V = StoreValue>(options: ReadFieldOptions, context: ReadMergeModifyContext): SafeReadonly<V> | undefined;
getReadFunction(typename: string | undefined, fieldName: string): FieldReadFunction | undefined;
getMergeFunction(parentTypename: string | undefined, fieldName: string, childTypename: string | undefined): FieldMergeFunction | undefined;
runMergeFunction(existing: StoreValue, incoming: StoreValue, { field, typename, merge }: MergeInfo, context: WriteContext, storage?: StorageType): any;
}
export declare function normalizeReadFieldOptions(readFieldArgs: IArguments, objectOrReference: StoreObject | Reference | undefined, variables?: ReadMergeModifyContext["variables"]): ReadFieldOptions;
export {};
//# sourceMappingURL=policies.d.ts.map
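
The TypePolicy and FieldPolicy declarations above are the types behind InMemoryCache's typePolicies option. A hedged usage example combining keyArgs with a merge function follows; the paginated feed field and its type argument are assumptions for illustration only.

import { InMemoryCache } from "@apollo/client";

const cache = new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        // Hypothetical paginated field: only the "type" argument distinguishes
        // cache entries; offset/limit pages are concatenated by merge.
        feed: {
          keyArgs: ["type"],
          merge(existing: any[] = [], incoming: any[]) {
            return [...existing, ...incoming];
          },
        },
      },
    },
  },
});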

Some files were not shown because too many files have changed in this diff.