New Upstream Release - graphql-relay
Ready changes
Summary
Merged new upstream version: 3.2.0 (was: 2.0.1).
Diff
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
new file mode 100644
index 0000000..2f67a24
--- /dev/null
+++ b/.bumpversion.cfg
@@ -0,0 +1,12 @@
+[bumpversion]
+current_version = 3.2.0
+commit = False
+tag = False
+
+[bumpversion:file:src/graphql/version.py]
+search = version = "{current_version}"
+replace = version = "{new_version}"
+
+[bumpversion:file:pyproject.toml]
+search = version = "{current_version}"
+replace = version = "{new_version}"
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..ef00470
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,19 @@
+[run]
+branch = True
+source = src
+
+[report]
+exclude_lines =
+ pragma: no cover
+ except ImportError:
+ \# Python <
+ raise NotImplementedError
+ raise TypeError\(f?"Unexpected
+ assert False,
+ \s+next\($
+ if MYPY:
+ if TYPE_CHECKING:
+ ^\s+\.\.\.$
+ ^\s+pass$
+ignore_errors = True
+
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..d4a2c44
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,21 @@
+# http://editorconfig.org
+
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+trim_trailing_whitespace = true
+insert_final_newline = true
+charset = utf-8
+end_of_line = lf
+
+[*.bat]
+indent_style = tab
+end_of_line = crlf
+
+[LICENSE]
+insert_final_newline = false
+
+[Makefile]
+indent_style = tab
diff --git a/.flake8 b/.flake8
index 12b6f7d..ccded58 100644
--- a/.flake8
+++ b/.flake8
@@ -1,4 +1,4 @@
[flake8]
-ignore = E203,W503,E704
+ignore = E203,W503
exclude = .git,.mypy_cache,.pytest_cache,.tox,.venv,__pycache__,build,dist,docs
max-line-length = 88
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000..ac4c67a
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,25 @@
+name: Code quality
+
+on: [push, pull_request]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install tox
+
+ - name: Run code quality tests with tox
+ run: tox
+ env:
+ TOXENV: black,flake8,mypy,manifest
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..fc16674
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,29 @@
+name: Publish
+
+on:
+ push:
+ tags:
+ - 'v*'
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+
+ - name: Build wheel and source tarball
+ run: |
+ pip install wheel
+ python setup.py sdist bdist_wheel
+
+ - name: Publish a Python distribution to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ user: __token__
+ password: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..09278c1
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,27 @@
+name: Tests
+
+on: [push, pull_request]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ strategy:
+ matrix:
+ python: ['3.6', '3.7', '3.8', '3.9', '3.10', 'pypy3']
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install tox tox-gh-actions
+
+ - name: Run unit tests with tox
+ run: tox
diff --git a/.gitignore b/.gitignore
index f55a50f..2b424f0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,62 +1,28 @@
-__pycache__/
-*.py[cod]
-
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-pip-wheel-metadata/
-share/python-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-*.manifest
-*.spec
-
-pip-log.txt
-pip-delete-this-directory.txt
-
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
.cache
-nosetests.xml
-coverage.xml
-*.cover
-.pytest_cache/
-
-*.mo
-*.pot
-
-*.log
-
-docs/_build/
-
-target/
-
-.python-version
-
+.coverage
.env
+.env.bak
+.idea
+.mypy_cache
+.pytest_cache
+.tox
.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
+.venv.bak
+.vs
+
+build
+dist
+docs/_build
+pip-wheel-metadata
+wheels
-.mypy_cache/
+play
-.idea/
+__pycache__
+
+*.cover
+*.egg
+*.egg-info
+*.log
+*.py[cod]
diff --git a/.mypy.ini b/.mypy.ini
new file mode 100644
index 0000000..ffc79d9
--- /dev/null
+++ b/.mypy.ini
@@ -0,0 +1,11 @@
+[mypy]
+python_version = 3.9
+check_untyped_defs = True
+no_implicit_optional = True
+strict_optional = True
+warn_redundant_casts = True
+warn_unused_ignores = True
+disallow_untyped_defs = True
+
+[mypy-tests.*]
+disallow_untyped_defs = False
diff --git a/.pyup.yml b/.pyup.yml
new file mode 100644
index 0000000..a438176
--- /dev/null
+++ b/.pyup.yml
@@ -0,0 +1 @@
+branch: main
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index ea2d288..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-language: python
-dist: xenial
-
-python:
- - "2.7"
- - "3.5"
- - "3.6"
- - "3.7"
- - "pypy3"
-install:
- - pip install .
- - pip install "flake8>=3.7,<4"
-script:
- - python setup.py test -a "--cov=graphql_relay"
- - flake8 setup.py src tests
-after_success:
- - pip install coveralls
- - coveralls
-
-deploy:
- provider: pypi
- distributions: sdist bdist_wheel
- on:
- branch: master
- tags: true
- python: 3.7
- skip_existing: true
- user: mvanlonden
- password:
- secure: gu79sV5e3wkUI8eH20TbBRctOOGkuWe6EQUjJP17q1Jm15/sJsehOaaLPiG0psxtqBGABp+z+xXkjhRfNMowY2xJPLjGmpF23pOIWGjBWKLKXzJStegKM6kwJYyHzcykIhBEb74047FhqhcwOgZcsp6sZpfzuby+onevLzPDI0C4jTARw7WAqMln9JIBZJyoQKp9fpr8AoXL2JtHr6sjkeJ94lz1XViL9rtTo7qFYfqYS3XdKO0wJhLfMUXSihIzSWQykBVv+75tMKQr0CtGALXcJSRGT6Sb31GiFyG93rtcOcBvbjFt1MK8aNKyIMhwgSqjcgKvxWAvXn4MsCaabHPVEv0YuT9t9cQzaAvi81LqHkpALgpDfXFfsMzHG18/8ME9TpM8u52r/ST5lhukglfnxWVy4hg8VLZ0iiTtpS3hx1Ba4Uecr++6fI5X+KL3EPabApQM+t4rcC0h4mEbjq3IkZ/ANAIJ2UjKTMcUkbQbKqJ1MY4xQyw+vVugffBmhEWSb4GnQPEadMGD6qfUI+t7epDP0ipp67rOiUooFYGabQp40pf7MxFPG23fvJ3JWbo3fzcIRmQiSalrIL1gFXH2DQnv4xhZvwnci+dIK29mamH0CfmjAJ8BdfzRsjV156BZbfkzXRfWDMes9G2E8S27xJJ2N4ySNIMkaXsu1u4=
-
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000..9a4d3d1
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1 @@
+* @Cito
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
index 5f8af26..d7aae87 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,12 +1,20 @@
include MANIFEST.in
+include CODEOWNERS
include LICENSE
include README.md
+include .bumpversion.cfg
+include .coveragerc
+include .editorconfig
include .flake8
+include .mypy.ini
include tox.ini
+include poetry.lock
+include pyproject.toml
+
graft src/graphql_relay
graft tests
diff --git a/README.md b/README.md
index b12dcf3..5a76f63 100644
--- a/README.md
+++ b/README.md
@@ -1,15 +1,25 @@
# Relay Library for GraphQL Python
-This is a library to allow the easy creation of Relay-compliant servers using
-the [GraphQL Python](https://github.com/graphql-python/graphql-core) reference implementation
-of a GraphQL server.
+GraphQL-relay-py is the [Relay](https://relay.dev/) library for
+[GraphQL-core](https://github.com/graphql-python/graphql-core).
-*Note: The code is a __exact__ port of the original [graphql-relay js implementation](https://github.com/graphql/graphql-relay-js)
-from Facebook*
+It allows the easy creation of Relay-compliant servers using GraphQL-core.
+
+GraphQL-Relay-Py is a Python port of
+[graphql-relay-js](https://github.com/graphql/graphql-relay-js),
+while GraphQL-Core is a Python port of
+[GraphQL.js](https://github.com/graphql/graphql-js),
+the reference implementation of GraphQL for JavaScript.
+
+Since version 3, GraphQL-Relay-Py and GraphQL-Core support Python 3.6 and above only.
+For older versions of Python, you can use version 2 of these libraries.
[![PyPI version](https://badge.fury.io/py/graphql-relay.svg)](https://badge.fury.io/py/graphql-relay)
-[![Build Status](https://travis-ci.org/graphql-python/graphql-relay-py.svg?branch=master)](https://travis-ci.org/graphql-python/graphql-relay-py)
-[![Coverage Status](https://coveralls.io/repos/graphql-python/graphql-relay-py/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphql-relay-py?branch=master)
+![Test Status](https://github.com/graphql-python/graphql-relay-py/actions/workflows/test.yml/badge.svg)
+![Lint Status](https://github.com/graphql-python/graphql-relay-py/actions/workflows/lint.yml/badge.svg)
+[![Dependency Updates](https://pyup.io/repos/github/graphql-python/graphql-relay-py/shield.svg)](https://pyup.io/repos/github/graphql-python/graphql-relay-py/)
+[![Python 3 Status](https://pyup.io/repos/github/graphql-python/graphql-relay-py/python-3-shield.svg)](https://pyup.io/repos/github/graphql-python/graphql-relay-py/)
+[![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black)
## Getting Started
@@ -20,81 +30,89 @@ An overview of GraphQL in general is available in the
[README](https://github.com/graphql-python/graphql-core/blob/master/README.md) for the
[Specification for GraphQL](https://github.com/graphql-python/graphql-core).
-This library is designed to work with the
-the [GraphQL Python](https://github.com/graphql-python/graphql-core) reference implementation
-of a GraphQL server.
+This library is designed to work with the
+the [GraphQL-Core](https://github.com/graphql-python/graphql-core)
+Python reference implementation of a GraphQL server.
-An overview of the functionality that a Relay-compliant GraphQL server should
-provide is in the [GraphQL Relay Specification](https://facebook.github.io/relay/docs/graphql-relay-specification.html)
-on the [Relay website](https://facebook.github.io/relay/). That overview
-describes a simple set of examples that exist as [tests](tests) in this
-repository. A good way to get started with this repository is to walk through
-that documentation and the corresponding tests in this library together.
+An overview of the functionality that a Relay-compliant GraphQL server should provide
+is in the [GraphQL Relay Specification](https://facebook.github.io/relay/docs/graphql-relay-specification.html)
+on the [Relay website](https://facebook.github.io/relay/).
+That overview describes a simple set of examples that exist
+as [tests](tests) in this repository.
+A good way to get started with this repository is to walk through that documentation
+and the corresponding tests in this library together.
## Using Relay Library for GraphQL Python (graphql-core)
Install Relay Library for GraphQL Python
```sh
-pip install "graphql-core>=2,<3" # use version 2.x of graphql-core
+pip install graphql-core
pip install graphql-relay
```
When building a schema for [GraphQL](https://github.com/graphql-python/graphql-core),
-the provided library functions can be used to simplify the creation of Relay
-patterns.
+the provided library functions can be used to simplify the creation of Relay patterns.
+
+All the functions that are explained in the following sections must be
+imported from the top level of the `graphql_relay` package, like this:
+```python
+from graphql_relay import connection_definitions
+```
-### Connections
+### Connections
Helper functions are provided for both building the GraphQL types
-for connections and for implementing the `resolver` method for fields
+for connections and for implementing the `resolve` method for fields
returning those types.
- `connection_args` returns the arguments that fields should provide when
-they return a connection type.
+they return a connection type that supports bidirectional pagination.
+ - `forward_connection_args` returns the arguments that fields should provide when
+they return a connection type that only supports forward pagination.
+ - `backward_connection_args` returns the arguments that fields should provide when
+they return a connection type that only supports backward pagination.
- `connection_definitions` returns a `connection_type` and its associated
`edgeType`, given a name and a node type.
- - `connection_from_list` is a helper method that takes a list and the
+ - `connection_from_array` is a helper method that takes an array and the
arguments from `connection_args`, does pagination and filtering, and returns
-an object in the shape expected by a `connection_type`'s `resolver` function.
- - `connection_from_promised_list` is similar to `connection_from_list`, but
-it takes a promise that resolves to an array, and returns a promise that
-resolves to the expected shape by `connection_type`.
- - `cursor_for_object_in_connection` is a helper method that takes a list and a
+an object in the shape expected by a `connection_type`'s `resolve` function.
+ - `cursor_for_object_in_connection` is a helper method that takes an array and a
member object, and returns a cursor for use in the mutation payload.
+ - `offset_to_cursor` takes the index of a member object in an array
+ and returns an opaque cursor for use in the mutation payload.
+ - `cursor_to_offset` takes an opaque cursor (created with `offset_to_cursor`)
+and returns the corresponding array index.
-An example usage of these methods from the [test schema](tests/starwars/schema.py):
+An example usage of these methods from the [test schema](tests/star_wars_schema.py):
```python
-ship_edge, ship_connection = connection_definitions('Ship', shipType)
-
-factionType = GraphQLObjectType(
- name='Faction',
- description='A faction in the Star Wars saga',
- fields= lambda: {
- 'id': global_id_field('Faction'),
- 'name': GraphQLField(
- GraphQLString,
- description='The name of the faction.',
- ),
- 'ships': GraphQLField(
+ship_edge, ship_connection = connection_definitions(ship_type, "Ship")
+
+faction_type = GraphQLObjectType(
+ name="Faction",
+ description="A faction in the Star Wars saga",
+ fields=lambda: {
+ "id": global_id_field("Faction"),
+ "name": GraphQLField(GraphQLString, description="The name of the faction."),
+ "ships": GraphQLField(
ship_connection,
- description='The ships used by the faction.',
+ description="The ships used by the faction.",
args=connection_args,
- resolver=lambda faction, _info, **args: connection_from_list(
- [getShip(ship) for ship in faction.ships], args
+ resolve=lambda faction, _info, **args: connection_from_array(
+ [get_ship(ship) for ship in faction.ships], args
),
- )
+ ),
},
- interfaces=[node_interface]
+ interfaces=[node_interface],
)
```
This shows adding a `ships` field to the `Faction` object that is a connection.
-It uses `connection_definitions({name: 'Ship', nodeType: shipType})` to create
-the connection type, adds `connection_args` as arguments on this function, and
-then implements the resolver function by passing the list of ships and the
-arguments to `connection_from_list`.
+It uses `connection_definitions(ship_type, "Ship")` to create the connection
+type, adds `connection_args` as arguments on this function, and then implements
+the resolver function by passing the array of ships and the arguments to
+`connection_from_array`.
### Object Identification
@@ -102,57 +120,66 @@ Helper functions are provided for both building the GraphQL types
for nodes and for implementing global IDs around local IDs.
- `node_definitions` returns the `Node` interface that objects can implement,
-and returns the `node` root field to include on the query type. To implement
-this, it takes a function to resolve an ID to an object, and to determine
-the type of a given object.
+ and returns the `node` root field to include on the query type.
+ To implement this, it takes a function to resolve an ID to an object,
+ and to determine the type of a given object.
- `to_global_id` takes a type name and an ID specific to that type name,
-and returns a "global ID" that is unique among all types.
- - `from_global_id` takes the "global ID" created by `to_global_id`, and returns
-the type name and ID used to create it.
+ and returns a "global ID" that is unique among all types.
+ - `from_global_id` takes the "global ID" created by `to_global_id`, and
+ returns the type name and ID used to create it.
- `global_id_field` creates the configuration for an `id` field on a node.
- `plural_identifying_root_field` creates a field that accepts a list of
-non-ID identifiers (like a username) and maps then to their corresponding
-objects.
+ non-ID identifiers (like a username) and maps then to their corresponding
+ objects.
-An example usage of these methods from the [test schema](tests/starwars/schema.py):
+An example usage of these methods from the [test schema](tests/star_wars_schema.py):
```python
def get_node(global_id, _info):
type_, id_ = from_global_id(global_id)
- if type_ == 'Faction':
- return getFaction(id_)
- elif type_ == 'Ship':
- return getShip(id_)
- else:
- return None
-
-def get_node_type(obj, _info):
+ if type_ == "Faction":
+ return get_faction(id_)
+ if type_ == "Ship":
+ return get_ship(id_)
+ return None # pragma: no cover
+
+def get_node_type(obj, _info, _type):
if isinstance(obj, Faction):
- return factionType
- else:
- return shipType
+ return faction_type.name
+ return ship_type.name
-node_interface, node_field = node_definitions(get_node, get_node_type)
+node_interface, node_field = node_definitions(get_node, get_node_type)[:2]
-factionType = GraphQLObjectType(
- name= 'Faction',
- description= 'A faction in the Star Wars saga',
- fields= lambda: {
- 'id': global_id_field('Faction'),
+faction_type = GraphQLObjectType(
+ name="Faction",
+ description="A faction in the Star Wars saga",
+ fields=lambda: {
+ "id": global_id_field("Faction"),
+ "name": GraphQLField(GraphQLString, description="The name of the faction."),
+ "ships": GraphQLField(
+ ship_connection,
+ description="The ships used by the faction.",
+ args=connection_args,
+ resolve=lambda faction, _info, **args: connection_from_array(
+ [get_ship(ship) for ship in faction.ships], args
+ ),
+ ),
},
- interfaces= [node_interface]
+ interfaces=[node_interface],
)
-queryType = GraphQLObjectType(
- name= 'Query',
- fields= lambda: {
- 'node': node_field
- }
+query_type = GraphQLObjectType(
+ name="Query",
+ fields=lambda: {
+ "rebels": GraphQLField(faction_type, resolve=lambda _obj, _info: get_rebels()),
+ "empire": GraphQLField(faction_type, resolve=lambda _obj, _info: get_empire()),
+ "node": node_field,
+ },
)
```
This uses `node_definitions` to construct the `Node` interface and the `node`
-field; it uses `from_global_id` to resolve the IDs passed in in the implementation
+field; it uses `from_global_id` to resolve the IDs passed in the implementation
of the function mapping ID to object. It then uses the `global_id_field` method to
create the `id` field on `Faction`, which also ensures implements the
`node_interface`. Finally, it adds the `node` field to the query type, using the
@@ -168,72 +195,85 @@ and a mutation method to map from the input fields to the output fields,
performing the mutation along the way. It then creates and returns a field
configuration that can be used as a top-level field on the mutation type.
-An example usage of these methods from the [test schema](tests/starwars/schema.py):
+An example usage of these methods from the [test schema](tests/star_wars_schema.py):
```python
-class IntroduceShipMutation(object):
+class IntroduceShipMutation:
+
def __init__(self, shipId, factionId, clientMutationId=None):
self.shipId = shipId
self.factionId = factionId
self.clientMutationId = clientMutationId
def mutate_and_get_payload(_info, shipName, factionId, **_input):
- newShip = createShip(shipName, factionId)
- return IntroduceShipMutation(
- shipId=newShip.id,
- factionId=factionId,
- )
-
-shipMutation = mutation_with_client_mutation_id(
- 'IntroduceShip',
+ new_ship = create_ship(shipName, factionId)
+ return IntroduceShipMutation(shipId=new_ship.id, factionId=factionId)
+
+ship_mutation = mutation_with_client_mutation_id(
+ "IntroduceShip",
input_fields={
- 'shipName': GraphQLField(
- GraphQLNonNull(GraphQLString)
- ),
- 'factionId': GraphQLField(
- GraphQLNonNull(GraphQLID)
- )
+ "shipName": GraphQLInputField(GraphQLNonNull(GraphQLString)),
+ "factionId": GraphQLInputField(GraphQLNonNull(GraphQLID)),
},
- output_fields= {
- 'ship': GraphQLField(
- shipType,
- resolver=lambda payload, _info: getShip(payload.shipId)
+ output_fields={
+ "ship": GraphQLField(
+ ship_type, resolve=lambda payload, _info: get_ship(payload.shipId)
+ ),
+ "faction": GraphQLField(
+ faction_type, resolve=lambda payload, _info: get_faction(payload.factionId)
),
- 'faction': GraphQLField(
- factionType,
- resolver=lambda payload, _info: getFaction(payload.factionId)
- )
},
- mutate_and_get_payload=mutate_and_get_payload
+ mutate_and_get_payload=mutate_and_get_payload,
)
-mutationType = GraphQLObjectType(
- 'Mutation',
- fields=lambda: {
- 'introduceShip': shipMutation
- }
+mutation_type = GraphQLObjectType(
+ "Mutation", fields=lambda: {"introduceShip": ship_mutation}
)
```
This code creates a mutation named `IntroduceShip`, which takes a faction
ID and a ship name as input. It outputs the `Faction` and the `Ship` in
-question. `mutate_and_get_payload` then gets an object with a property for
-each input field, performs the mutation by constructing the new ship, then
-returns an object that will be resolved by the output fields.
+question. `mutate_and_get_payload` then gets each input field as keyword
+parameter, performs the mutation by constructing the new ship, then returns
+an object that will be resolved by the output fields.
Our mutation type then creates the `introduceShip` field using the return
value of `mutation_with_client_mutation_id`.
## Contributing
-After cloning this repo, ensure dependencies are installed by running:
+After cloning this repository from GitHub,
+we recommend using [Poetry](https://poetry.eustace.io/)
+to create a test environment. With poetry installed,
+you do this with the following command:
+
+```sh
+poetry install
+```
+
+You can then run the complete test suite like this:
+
+```sh
+poetry run pytest
+```
+
+In order to run only a part of the tests with increased verbosity,
+you can add pytest options, like this:
+
+```sh
+poetry run pytest tests/node -vv
+```
+
+In order to check the code style with flake8, use this:
```sh
-python setup.py install
+poetry run flake8
```
-After developing, the full test suite can be evaluated by running:
+Use the `tox` command to run the test suite with different
+Python versions and perform all additional source code checks.
+You can also restrict tox to an individual environment, like this:
```sh
-python setup.py test # Use --pytest-args="-v -s" for verbose mode
+poetry run tox -e py39
```
diff --git a/debian/changelog b/debian/changelog
index f6401ef..afee66e 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+graphql-relay (3.2.0-1) UNRELEASED; urgency=low
+
+ * New upstream release.
+
+ -- Debian Janitor <janitor@jelmer.uk> Sat, 25 Feb 2023 16:45:08 -0000
+
graphql-relay (2.0.1-1) unstable; urgency=medium
* Upload to unstable
diff --git a/graphql_relay/__init__.py b/graphql_relay/__init__.py
deleted file mode 100644
index 39ee980..0000000
--- a/graphql_relay/__init__.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from .connection.connection import (
- connection_args,
- connection_definitions
-)
-from .connection.arrayconnection import (
- connection_from_list,
- connection_from_promised_list,
- cursor_for_object_in_connection
-)
-from .node.node import (
- node_definitions,
- from_global_id,
- to_global_id,
- global_id_field,
-)
-from .mutation.mutation import (
- mutation_with_client_mutation_id
-)
-
-__all__ = [
- # Helpers for creating connection types in the schema
- 'connection_args', 'connection_definitions',
- # Helpers for creating connections from arrays
- 'connection_from_list', 'connection_from_promised_list',
- 'cursor_for_object_in_connection',
- # Helper for creating node definitions
- 'node_definitions',
- # Utilities for creating global IDs in systems that don't have them
- 'from_global_id', 'to_global_id', 'global_id_field',
- # Helper for creating mutations with client mutation IDs
- 'mutation_with_client_mutation_id'
-]
diff --git a/graphql_relay/connection/__init__.py b/graphql_relay/connection/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/graphql_relay/connection/arrayconnection.py b/graphql_relay/connection/arrayconnection.py
deleted file mode 100644
index ea951f5..0000000
--- a/graphql_relay/connection/arrayconnection.py
+++ /dev/null
@@ -1,156 +0,0 @@
-from ..utils import base64, unbase64, is_str
-from .connectiontypes import Connection, PageInfo, Edge
-
-
-def connection_from_list(data, args=None, **kwargs):
- """
- A simple function that accepts an array and connection arguments, and returns
- a connection object for use in GraphQL. It uses array offsets as pagination,
- so pagination will only work if the array is static.
- """
- _len = len(data)
- return connection_from_list_slice(
- data,
- args,
- slice_start=0,
- list_length=_len,
- list_slice_length=_len,
- **kwargs
- )
-
-
-def connection_from_promised_list(data_promise, args=None, **kwargs):
- """
- A version of `connectionFromArray` that takes a promised array, and returns a
- promised connection.
- """
- return data_promise.then(lambda data: connection_from_list(data, args, **kwargs))
-
-
-def connection_from_list_slice(list_slice, args=None, connection_type=None,
- edge_type=None, pageinfo_type=None,
- slice_start=0, list_length=0, list_slice_length=None):
- """
- Given a slice (subset) of an array, returns a connection object for use in
- GraphQL.
- This function is similar to `connectionFromArray`, but is intended for use
- cases where you know the cardinality of the connection, consider it too large
- to materialize the entire array, and instead wish pass in a slice of the
- total result large enough to cover the range specified in `args`.
- """
- connection_type = connection_type or Connection
- edge_type = edge_type or Edge
- pageinfo_type = pageinfo_type or PageInfo
-
- args = args or {}
-
- before = args.get('before')
- after = args.get('after')
- first = args.get('first')
- last = args.get('last')
- if list_slice_length is None:
- list_slice_length = len(list_slice)
- slice_end = slice_start + list_slice_length
- before_offset = get_offset_with_default(before, list_length)
- after_offset = get_offset_with_default(after, -1)
-
- start_offset = max(
- slice_start - 1,
- after_offset,
- -1
- ) + 1
- end_offset = min(
- slice_end,
- before_offset,
- list_length
- )
- if isinstance(first, int):
- end_offset = min(
- end_offset,
- start_offset + first
- )
- if isinstance(last, int):
- start_offset = max(
- start_offset,
- end_offset - last
- )
-
- # If supplied slice is too large, trim it down before mapping over it.
- _slice = list_slice[
- max(start_offset - slice_start, 0):
- list_slice_length - (slice_end - end_offset)
- ]
- edges = [
- edge_type(
- node=node,
- cursor=offset_to_cursor(start_offset + i)
- )
- for i, node in enumerate(_slice)
- ]
-
- first_edge_cursor = edges[0].cursor if edges else None
- last_edge_cursor = edges[-1].cursor if edges else None
- lower_bound = after_offset + 1 if after else 0
- upper_bound = before_offset if before else list_length
-
- return connection_type(
- edges=edges,
- page_info=pageinfo_type(
- start_cursor=first_edge_cursor,
- end_cursor=last_edge_cursor,
- has_previous_page=isinstance(last, int) and start_offset > lower_bound,
- has_next_page=isinstance(first, int) and end_offset < upper_bound
- )
- )
-
-
-PREFIX = 'arrayconnection:'
-
-
-def connection_from_promised_list_slice(data_promise, args=None, **kwargs):
- return data_promise.then(
- lambda data: connection_from_list_slice(data, args, **kwargs))
-
-
-def offset_to_cursor(offset):
- """
- Creates the cursor string from an offset.
- """
- return base64(PREFIX + str(offset))
-
-
-def cursor_to_offset(cursor):
- """
- Rederives the offset from the cursor string.
- """
- try:
- return int(unbase64(cursor)[len(PREFIX):])
- except Exception:
- return None
-
-
-def cursor_for_object_in_connection(data, _object):
- """
- Return the cursor associated with an object in an array.
- """
- if _object not in data:
- return None
-
- offset = data.index(_object)
- return offset_to_cursor(offset)
-
-
-def get_offset_with_default(cursor=None, default_offset=0):
- """
- Given an optional cursor and a default offset, returns the offset
- to use; if the cursor contains a valid offset, that will be used,
- otherwise it will be the default.
- """
- if not is_str(cursor):
- return default_offset
-
- offset = cursor_to_offset(cursor)
- try:
- return int(offset)
- except Exception:
- return default_offset
diff --git a/graphql_relay/connection/connection.py b/graphql_relay/connection/connection.py
deleted file mode 100644
index 3ae6678..0000000
--- a/graphql_relay/connection/connection.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from collections import OrderedDict
-
-from graphql.type import (
- GraphQLArgument,
- GraphQLBoolean,
- GraphQLInt,
- GraphQLNonNull,
- GraphQLList,
- GraphQLObjectType,
- GraphQLString,
- GraphQLField
-)
-from ..utils import resolve_maybe_thunk
-
-
-connection_args = OrderedDict((
- ('before', GraphQLArgument(GraphQLString)),
- ('after', GraphQLArgument(GraphQLString)),
- ('first', GraphQLArgument(GraphQLInt)),
- ('last', GraphQLArgument(GraphQLInt)),
-))
-
-
-def connection_definitions(
- name, node_type, resolve_node=None, resolve_cursor=None,
- edge_fields=None, connection_fields=None):
- edge_fields = edge_fields or OrderedDict()
- connection_fields = connection_fields or OrderedDict()
- edge_type = GraphQLObjectType(
- name + 'Edge',
- description='An edge in a connection.',
- fields=lambda: OrderedDict((
- ('node', GraphQLField(
- node_type,
- resolver=resolve_node,
- description='The item at the end of the edge',
- )),
- ('cursor', GraphQLField(
- GraphQLNonNull(GraphQLString),
- resolver=resolve_cursor,
- description='A cursor for use in pagination',
- )),
- ), **resolve_maybe_thunk(edge_fields))
- )
-
- connection_type = GraphQLObjectType(
- name + 'Connection',
- description='A connection to a list of items.',
- fields=lambda: OrderedDict((
- ('pageInfo', GraphQLField(
- GraphQLNonNull(page_info_type),
- description='The Information to aid in pagination',
- )),
- ('edges', GraphQLField(
- GraphQLList(edge_type),
- description='A list of edges.',
- )),
- ), **resolve_maybe_thunk(connection_fields))
- )
-
- return edge_type, connection_type
-
-
-# The common page info type used by all connections.
-page_info_type = GraphQLObjectType(
- 'PageInfo',
- description='Information about pagination in a connection.',
- fields=lambda: OrderedDict((
- ('hasNextPage', GraphQLField(
- GraphQLNonNull(GraphQLBoolean),
- description='When paginating forwards, are there more items?',
- )),
- ('hasPreviousPage', GraphQLField(
- GraphQLNonNull(GraphQLBoolean),
- description='When paginating backwards, are there more items?',
- )),
- ('startCursor', GraphQLField(
- GraphQLString,
- description='When paginating backwards, the cursor to continue.',
- )),
- ('endCursor', GraphQLField(
- GraphQLString,
- description='When paginating forwards, the cursor to continue.',
- )),
- ))
-)
diff --git a/graphql_relay/connection/connectiontypes.py b/graphql_relay/connection/connectiontypes.py
deleted file mode 100644
index 28c8ccb..0000000
--- a/graphql_relay/connection/connectiontypes.py
+++ /dev/null
@@ -1,42 +0,0 @@
-class Connection(object):
-
- def __init__(self, edges, page_info):
- self.edges = edges
- self.page_info = page_info
-
- def to_dict(self):
- return {
- 'edges': [e.to_dict() for e in self.edges],
- 'pageInfo': self.page_info.to_dict(),
- }
-
-
-class PageInfo(object):
-
- def __init__(self, start_cursor="", end_cursor="",
- has_previous_page=False, has_next_page=False):
- self.startCursor = start_cursor
- self.endCursor = end_cursor
- self.hasPreviousPage = has_previous_page
- self.hasNextPage = has_next_page
-
- def to_dict(self):
- return {
- 'startCursor': self.startCursor,
- 'endCursor': self.endCursor,
- 'hasPreviousPage': self.hasPreviousPage,
- 'hasNextPage': self.hasNextPage,
- }
-
-
-class Edge(object):
-
- def __init__(self, node, cursor):
- self.node = node
- self.cursor = cursor
-
- def to_dict(self):
- return {
- 'node': self.node,
- 'cursor': self.cursor,
- }
diff --git a/graphql_relay/connection/tests/__init__.py b/graphql_relay/connection/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/graphql_relay/connection/tests/test_arrayconnection.py b/graphql_relay/connection/tests/test_arrayconnection.py
deleted file mode 100644
index 810558d..0000000
--- a/graphql_relay/connection/tests/test_arrayconnection.py
+++ /dev/null
@@ -1,837 +0,0 @@
-from promise import Promise
-
-from ..arrayconnection import (
- connection_from_list,
- connection_from_list_slice,
- connection_from_promised_list,
- connection_from_promised_list_slice,
- cursor_for_object_in_connection
-)
-
-letters = ['A', 'B', 'C', 'D', 'E']
-letters_promise = Promise.resolve(letters)
-
-
-def test_returns_all_elements_without_filters():
- c = connection_from_list(letters)
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_respects_a_smaller_first():
- c = connection_from_list(letters, dict(first=2))
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_respects_an_overly_large_first():
- c = connection_from_list(letters, dict(first=10))
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_respects_a_smaller_last():
- c = connection_from_list(letters, dict(last=2))
- expected = {
- 'edges': [
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': True,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_respects_an_overly_large_last():
- c = connection_from_list(letters, dict(last=10))
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_pagination_respects_first_after():
- c = connection_from_list(letters, dict(first=2, after='YXJyYXljb25uZWN0aW9uOjE='))
- expected = {
- 'edges': [
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_pagination_respects_longfirst_after():
- c = connection_from_list(
- letters, dict(first=10, after='YXJyYXljb25uZWN0aW9uOjE='))
- expected = {
- 'edges': [
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_pagination_respects_last_before():
- c = connection_from_list(letters, dict(last=2, before='YXJyYXljb25uZWN0aW9uOjM='))
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'hasPreviousPage': True,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_pagination_respects_longlast_before():
- c = connection_from_list(
- letters, dict(last=10, before='YXJyYXljb25uZWN0aW9uOjM='))
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_first_after_before_few():
- c = connection_from_list(letters, dict(
- first=2, after='YXJyYXljb25uZWN0aW9uOjA=', before='YXJyYXljb25uZWN0aW9uOjQ=',
- ))
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_first_after_before_many():
- c = connection_from_list(letters, dict(
- first=4, after='YXJyYXljb25uZWN0aW9uOjA=', before='YXJyYXljb25uZWN0aW9uOjQ=',
- ))
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_first_after_before_exact():
- c = connection_from_list(letters, dict(
- first=3, after='YXJyYXljb25uZWN0aW9uOjA=', before='YXJyYXljb25uZWN0aW9uOjQ=',
- ))
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_last_after_before_few():
- c = connection_from_list(letters, dict(
- last=2, after='YXJyYXljb25uZWN0aW9uOjA=', before='YXJyYXljb25uZWN0aW9uOjQ=',
- ))
- expected = {
- 'edges': [
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': True,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_last_after_before_many():
- c = connection_from_list(letters, dict(
- last=4, after='YXJyYXljb25uZWN0aW9uOjA=', before='YXJyYXljb25uZWN0aW9uOjQ=',
- ))
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_last_after_before_exact():
- c = connection_from_list(letters, dict(
- last=3, after='YXJyYXljb25uZWN0aW9uOjA=', before='YXJyYXljb25uZWN0aW9uOjQ=',
- ))
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_no_elements_first_0():
- c = connection_from_list(letters, dict(first=0))
- expected = {
- 'edges': [
- ],
- 'pageInfo': {
- 'startCursor': None,
- 'endCursor': None,
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_all_elements_invalid_cursors():
- c = connection_from_list(letters, dict(before='invalid', after='invalid'))
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_all_elements_cursor_outside():
- c = connection_from_list(letters, dict(
- before='YXJyYXljb25uZWN0aW9uOjYK', after='YXJyYXljb25uZWN0aW9uOi0xCg=='
- ))
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_no_elements_cursors_cross():
- c = connection_from_list(letters, dict(
- before='YXJyYXljb25uZWN0aW9uOjI=', after='YXJyYXljb25uZWN0aW9uOjQ='
- ))
- expected = {
- 'edges': [
- ],
- 'pageInfo': {
- 'startCursor': None,
- 'endCursor': None,
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_cursor_for_object_in_connection_member_object():
- letter_b_cursor = cursor_for_object_in_connection(letters, 'B')
- assert letter_b_cursor == 'YXJyYXljb25uZWN0aW9uOjE='
-
-
-def test_cursor_for_object_in_connection_non_member_object():
- letter_b_cursor = cursor_for_object_in_connection(letters, 'F')
- assert letter_b_cursor is None
-
-
-def test_promised_list_returns_all_elements_without_filters():
- c = connection_from_promised_list(letters_promise)
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.value.to_dict() == expected
-
-
-def test_promised_list_respects_a_smaller_first():
- c = connection_from_promised_list(letters_promise, dict(first=2))
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.value.to_dict() == expected
-
-
-def test_list_slice_works_with_a_just_right_array_slice():
- c = connection_from_list_slice(
- letters[1:3],
- dict(
- first=2,
- after='YXJyYXljb25uZWN0aW9uOjA=',
- ),
- slice_start=1,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_list_slice_works_with_an_oversized_array_slice_left_side():
- c = connection_from_list_slice(
- letters[0:3],
- dict(
- first=2,
- after='YXJyYXljb25uZWN0aW9uOjA=',
- ),
- slice_start=0,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_list_slice_works_with_an_oversized_array_slice_right_side():
- c = connection_from_list_slice(
- letters[2:4],
- dict(
- first=1,
- after='YXJyYXljb25uZWN0aW9uOjE=',
- ),
- slice_start=2,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_list_slice_works_with_an_oversized_array_slice_both_sides():
- c = connection_from_list_slice(
- letters[1:4],
- dict(
- first=1,
- after='YXJyYXljb25uZWN0aW9uOjE=',
- ),
- slice_start=1,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_list_slice_works_with_an_undersized_array_slice_left_side():
- c = connection_from_list_slice(
- letters[3:5],
- dict(
- first=3,
- after='YXJyYXljb25uZWN0aW9uOjE=',
- ),
- slice_start=3,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- {
- 'node': 'E',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjQ=',
- 'hasPreviousPage': False,
- 'hasNextPage': False,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_list_slice_works_with_an_undersized_array_slice_right_side():
- c = connection_from_list_slice(
- letters[2:4],
- dict(
- first=3,
- after='YXJyYXljb25uZWN0aW9uOjE=',
- ),
- slice_start=2,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'C',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- },
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjI=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_list_slice_works_with_an_undersized_array_slice_both_sides():
- c = connection_from_list_slice(
- letters[3:4],
- dict(
- first=3,
- after='YXJyYXljb25uZWN0aW9uOjE=',
- ),
- slice_start=3,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'D',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjM=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.to_dict() == expected
-
-
-def test_promised_list_slice_respects_a_smaller_first():
- letters_promise_slice = Promise.resolve(letters[:3])
- c = connection_from_promised_list_slice(
- letters_promise_slice,
- dict(first=2),
- slice_start=0,
- list_length=5
- )
- expected = {
- 'edges': [
- {
- 'node': 'A',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- },
- {
- 'node': 'B',
- 'cursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- },
- ],
- 'pageInfo': {
- 'startCursor': 'YXJyYXljb25uZWN0aW9uOjA=',
- 'endCursor': 'YXJyYXljb25uZWN0aW9uOjE=',
- 'hasPreviousPage': False,
- 'hasNextPage': True,
- }
- }
- assert c.value.to_dict() == expected
diff --git a/graphql_relay/connection/tests/test_connection.py b/graphql_relay/connection/tests/test_connection.py
deleted file mode 100644
index f571cfe..0000000
--- a/graphql_relay/connection/tests/test_connection.py
+++ /dev/null
@@ -1,185 +0,0 @@
-from collections import namedtuple
-
-from graphql import graphql
-from graphql.type import (
- GraphQLSchema,
- GraphQLObjectType,
- GraphQLField,
- GraphQLInt,
- GraphQLString,
-)
-
-from ..arrayconnection import connection_from_list
-from ..connection import (
- connection_args,
- connection_definitions
-)
-
-User = namedtuple('User', ['name', 'friends'])
-
-allUsers = [
- User(name='Dan', friends=[1, 2, 3, 4]),
- User(name='Nick', friends=[0, 2, 3, 4]),
- User(name='Lee', friends=[0, 1, 3, 4]),
- User(name='Joe', friends=[0, 1, 2, 4]),
- User(name='Tim', friends=[0, 1, 2, 3]),
-]
-
-userType = GraphQLObjectType(
- 'User',
- fields=lambda: {
- 'name': GraphQLField(GraphQLString),
- 'friends': GraphQLField(
- friendConnection,
- args=connection_args,
- resolver=lambda user, _info, **args:
- connection_from_list(user.friends, args),
- ),
- },
-)
-
-friendEdge, friendConnection = connection_definitions(
- 'Friend',
- userType,
- resolve_node=lambda edge, _info: allUsers[edge.node],
- edge_fields=lambda: {
- 'friendshipTime': GraphQLField(
- GraphQLString,
- resolver=lambda _user, _info: 'Yesterday'
- ),
- },
- connection_fields=lambda: {
- 'totalCount': GraphQLField(
- GraphQLInt,
- resolver=lambda _user, _info: len(allUsers) - 1
- ),
- }
-)
-
-queryType = GraphQLObjectType(
- 'Query',
- fields=lambda: {
- 'user': GraphQLField(
- userType,
- resolver=lambda _root, _info: allUsers[0]
- ),
- }
-)
-
-schema = GraphQLSchema(query=queryType)
-
-
-def test_include_connections_and_edge_types():
- query = '''
- query FriendsQuery {
- user {
- friends(first: 2) {
- totalCount
- edges {
- friendshipTime
- node {
- name
- }
- }
- }
- }
- }
- '''
- expected = {
- 'user': {
- 'friends': {
- 'totalCount': 4,
- 'edges': [
- {
- 'friendshipTime': 'Yesterday',
- 'node': {
- 'name': 'Nick'
- }
- },
- {
- 'friendshipTime': 'Yesterday',
- 'node': {
- 'name': 'Lee'
- }
- },
- ]
- }
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_works_with_forward_connection_args():
- query = '''
- query FriendsQuery {
- user {
- friendsForward: friends(first: 2) {
- edges {
- node {
- name
- }
- }
- }
- }
- }
- '''
- expected = {
- 'user': {
- 'friendsForward': {
- 'edges': [
- {
- 'node': {
- 'name': 'Nick'
- }
- },
- {
- 'node': {
- 'name': 'Lee'
- }
- },
- ]
- }
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_works_with_backward_connection_args():
- query = '''
- query FriendsQuery {
- user {
- friendsBackward: friends(last: 2) {
- edges {
- node {
- name
- }
- }
- }
- }
- }
- '''
- expected = {
- 'user': {
- 'friendsBackward': {
- 'edges': [
- {
- 'node': {
- 'name': 'Joe'
- }
- },
- {
- 'node': {
- 'name': 'Tim'
- }
- },
- ]
- }
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
diff --git a/graphql_relay/mutation/__init__.py b/graphql_relay/mutation/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/graphql_relay/mutation/mutation.py b/graphql_relay/mutation/mutation.py
deleted file mode 100644
index f9fcc5c..0000000
--- a/graphql_relay/mutation/mutation.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from collections import OrderedDict
-
-from promise import Promise
-
-from graphql.type import (
- GraphQLArgument,
- GraphQLInputObjectField,
- GraphQLInputObjectType,
- GraphQLNonNull,
- GraphQLObjectType,
- GraphQLString,
- GraphQLField,
-)
-from graphql.error import GraphQLError
-
-from ..utils import resolve_maybe_thunk
-
-
-def mutation_with_client_mutation_id(
- name, input_fields, output_fields, mutate_and_get_payload):
- augmented_input_fields = OrderedDict(
- resolve_maybe_thunk(input_fields),
- clientMutationId=GraphQLInputObjectField(
- GraphQLNonNull(GraphQLString)
- )
- )
- augmented_output_fields = OrderedDict(
- resolve_maybe_thunk(output_fields),
- clientMutationId=GraphQLField(
- GraphQLNonNull(GraphQLString)
- )
- )
-
- input_type = GraphQLInputObjectType(
- name + 'Input',
- fields=augmented_input_fields,
- )
- output_type = GraphQLObjectType(
- name + 'Payload',
- fields=augmented_output_fields,
- )
-
- def resolver(_root, info, **args):
- input_ = args.get('input')
-
- def on_resolve(payload):
- try:
- payload.clientMutationId = input_['clientMutationId']
- except Exception:
- raise GraphQLError(
- 'Cannot set clientMutationId in the payload object {}'.format(
- repr(payload)))
- return payload
-
- return Promise.resolve(mutate_and_get_payload(info, **input_)).then(on_resolve)
-
- return GraphQLField(
- output_type,
- args=OrderedDict((
- ('input', GraphQLArgument(GraphQLNonNull(input_type))),
- )),
- resolver=resolver
- )
diff --git a/graphql_relay/mutation/tests/__init__.py b/graphql_relay/mutation/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/graphql_relay/mutation/tests/test_mutation.py b/graphql_relay/mutation/tests/test_mutation.py
deleted file mode 100644
index f0d5aec..0000000
--- a/graphql_relay/mutation/tests/test_mutation.py
+++ /dev/null
@@ -1,418 +0,0 @@
-from promise import Promise
-
-from graphql import graphql
-from graphql.type import (
- GraphQLSchema,
- GraphQLObjectType,
- GraphQLInt,
- GraphQLField,
- GraphQLInputObjectField
-)
-
-from ..mutation import mutation_with_client_mutation_id
-
-
-class Result(object):
-
- def __init__(self, result, clientMutationId=None):
- self.clientMutationId = clientMutationId
- self.result = result
-
-
-simpleMutation = mutation_with_client_mutation_id(
- 'SimpleMutation',
- input_fields={},
- output_fields={
- 'result': GraphQLField(GraphQLInt)
- },
- mutate_and_get_payload=lambda _info, **_input: Result(result=1)
-)
-
-simpleMutationWithThunkFields = mutation_with_client_mutation_id(
- 'SimpleMutationWithThunkFields',
- input_fields=lambda: {
- 'inputData': GraphQLInputObjectField(GraphQLInt)
- },
- output_fields=lambda: {
- 'result': GraphQLField(GraphQLInt)
- },
- mutate_and_get_payload=lambda _info, **input_: Result(result=input_['inputData'])
-)
-
-simplePromiseMutation = mutation_with_client_mutation_id(
- 'SimplePromiseMutation',
- input_fields={},
- output_fields={
- 'result': GraphQLField(GraphQLInt)
- },
- mutate_and_get_payload=lambda _info, **_input: Promise.resolve(Result(result=1))
-)
-
-simpleRootValueMutation = mutation_with_client_mutation_id(
- 'SimpleRootValueMutation',
- input_fields={},
- output_fields={
- 'result': GraphQLField(GraphQLInt)
- },
- mutate_and_get_payload=lambda info, **_input: info.root_value
-)
-
-mutation = GraphQLObjectType(
- 'Mutation',
- fields={
- 'simpleMutation': simpleMutation,
- 'simpleMutationWithThunkFields': simpleMutationWithThunkFields,
- 'simplePromiseMutation': simplePromiseMutation,
- 'simpleRootValueMutation': simpleRootValueMutation
- }
-)
-
-schema = GraphQLSchema(
- query=mutation,
- mutation=mutation
-)
-
-
-def test_requires_an_argument():
- query = '''
- mutation M {
- simpleMutation {
- result
- }
- }
- '''
- result = graphql(schema, query)
- assert len(result.errors) == 1
-
-
-def test_returns_the_same_client_mutation_id():
- query = '''
- mutation M {
- simpleMutation(input: {clientMutationId: "abc"}) {
- result
- clientMutationId
- }
- }
- '''
- expected = {
- 'simpleMutation': {
- 'result': 1,
- 'clientMutationId': 'abc'
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_supports_thunks_as_input_and_output_fields():
- query = '''
- mutation M {
- simpleMutationWithThunkFields(
- input: {inputData: 1234, clientMutationId: "abc"}) {
- result
- clientMutationId
- }
- }
- '''
- expected = {
- 'simpleMutationWithThunkFields': {
- 'result': 1234,
- 'clientMutationId': 'abc'
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_supports_promise_mutations():
- query = '''
- mutation M {
- simplePromiseMutation(input: {clientMutationId: "abc"}) {
- result
- clientMutationId
- }
- }
- '''
- expected = {
- 'simplePromiseMutation': {
- 'result': 1,
- 'clientMutationId': 'abc'
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_can_access_root_value():
- query = '''
- mutation M {
- simpleRootValueMutation(input: {clientMutationId: "abc"}) {
- result
- clientMutationId
- }
- }
- '''
- expected = {
- 'simpleRootValueMutation': {
- 'result': 1,
- 'clientMutationId': 'abc'
- }
- }
- result = graphql(schema, query, root=Result(result=1))
- assert not result.errors
- assert result.data == expected
-
-
-def test_contains_correct_input():
- query = '''
- {
- __type(name: "SimpleMutationInput") {
- name
- kind
- inputFields {
- name
- type {
- name
- kind
- ofType {
- name
- kind
- }
- }
- }
- }
- }
- '''
- expected = {
- '__type': {
- 'name': 'SimpleMutationInput',
- 'kind': 'INPUT_OBJECT',
- 'inputFields': [
- {
- 'name': 'clientMutationId',
- 'type': {
- 'name': None,
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'String',
- 'kind': 'SCALAR'
- }
- }
- }
- ]
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_contains_correct_payload():
- query = '''
- {
- __type(name: "SimpleMutationPayload") {
- name
- kind
- fields {
- name
- type {
- name
- kind
- ofType {
- name
- kind
- }
- }
- }
- }
- }
- '''
- expected1 = {
- '__type': {
- 'name': 'SimpleMutationPayload',
- 'kind': 'OBJECT',
- 'fields': [
- {
- 'name': 'clientMutationId',
- 'type': {
- 'name': None,
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'String',
- 'kind': 'SCALAR'
- }
- }
- },
- {
- 'name': 'result',
- 'type': {
- 'name': 'Int',
- 'kind': 'SCALAR',
- 'ofType': None
- }
- },
- ]
- }
- }
- expected2 = {
- '__type': {
- 'name': 'SimpleMutationPayload',
- 'kind': 'OBJECT',
- 'fields': [
- {
- 'name': 'result',
- 'type': {
- 'name': 'Int',
- 'kind': 'SCALAR',
- 'ofType': None
- }
- },
- {
- 'name': 'clientMutationId',
- 'type': {
- 'name': None,
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'String',
- 'kind': 'SCALAR'
- }
- }
- },
- ]
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected1 or result.data == expected2
-
-
-def test_contains_correct_field():
- query = '''
- {
- __schema {
- mutationType {
- fields {
- name
- args {
- name
- type {
- name
- kind
- ofType {
- name
- kind
- }
- }
- }
- type {
- name
- kind
- }
- }
- }
- }
- }
- '''
- expected = {
- '__schema': {
- 'mutationType': {
- 'fields': [
- {
- 'name': 'simplePromiseMutation',
- 'args': [
- {
- 'name': 'input',
- 'type': {
- 'name': None,
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'SimplePromiseMutationInput',
- 'kind': 'INPUT_OBJECT'
- }
- },
- }
- ],
- 'type': {
- 'name': 'SimplePromiseMutationPayload',
- 'kind': 'OBJECT',
- }
- },
- {
- 'name': 'simpleRootValueMutation',
- 'args': [
- {
- 'name': 'input',
- 'type': {
- 'name': None,
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'SimpleRootValueMutationInput',
- 'kind': 'INPUT_OBJECT'
- }
- },
- }
- ],
- 'type': {
- 'name': 'SimpleRootValueMutationPayload',
- 'kind': 'OBJECT',
- }
- },
- {
- 'name': 'simpleMutation',
- 'args': [
- {
- 'name': 'input',
- 'type': {
- 'name': None,
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'SimpleMutationInput',
- 'kind': 'INPUT_OBJECT'
- }
- },
- }
- ],
- 'type': {
- 'name': 'SimpleMutationPayload',
- 'kind': 'OBJECT',
- }
- },
- {
- 'name': 'simpleMutationWithThunkFields',
- 'args': [
- {
- 'name': 'input',
- 'type': {
- 'name': None,
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'SimpleMutationWithThunkFieldsInput',
- 'kind': 'INPUT_OBJECT'
- }
- },
- }
- ],
- 'type': {
- 'name': 'SimpleMutationWithThunkFieldsPayload',
- 'kind': 'OBJECT',
- }
- },
- ]
- }
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- # ensure the ordering is correct for the assertion
- expected['__schema']['mutationType']['fields'] = sorted(
- expected['__schema']['mutationType']['fields'],
- key=lambda k: k['name']
- )
- result.data['__schema']['mutationType']['fields'] = sorted(
- result.data['__schema']['mutationType']['fields'],
- key=lambda k: k['name']
- )
- assert result.data == expected
diff --git a/graphql_relay/node/__init__.py b/graphql_relay/node/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/graphql_relay/node/node.py b/graphql_relay/node/node.py
deleted file mode 100644
index 410ad8a..0000000
--- a/graphql_relay/node/node.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from collections import OrderedDict
-
-from six import text_type
-
-from graphql.type import (
- GraphQLArgument,
- GraphQLNonNull,
- GraphQLID,
- GraphQLField,
- GraphQLInterfaceType,
-)
-
-from ..utils import base64, unbase64
-
-
-def node_definitions(id_fetcher, type_resolver=None, id_resolver=None):
- """
- Given a function to map from an ID to an underlying object, and a function
- to map from an underlying object to the concrete GraphQLObjectType it
- corresponds to, constructs a `Node` interface that objects can implement,
- and a field config for a `node` root field.
-
- If the type_resolver is omitted, object resolution on the interface will be
- handled with the `isTypeOf` method on object types, as with any GraphQL
- interface without a provided `resolveType` method.
- """
- node_interface = GraphQLInterfaceType(
- 'Node',
- description='An object with an ID',
- fields=lambda: OrderedDict((
- ('id', GraphQLField(
- GraphQLNonNull(GraphQLID),
- description='The id of the object.',
- resolver=id_resolver,
- )),
- )),
- resolve_type=type_resolver
- )
- node_field = GraphQLField(
- node_interface,
- description='Fetches an object given its ID',
- args=OrderedDict((
- ('id', GraphQLArgument(
- GraphQLNonNull(GraphQLID),
- description='The ID of an object'
- )),
- )),
- resolver=lambda _obj, info, id: id_fetcher(id, info)
- )
- return node_interface, node_field
-
-
-def to_global_id(type, id):
- """
- Takes a type name and an ID specific to that type name, and returns a
- "global ID" that is unique among all types.
- """
- return base64(':'.join([type, text_type(id)]))
-
-
-def from_global_id(global_id):
- """
- Takes the "global ID" created by toGlobalID, and returns the type name and ID
- used to create it.
- """
- unbased_global_id = unbase64(global_id)
- _type, _id = unbased_global_id.split(':', 1)
- return _type, _id
-
-
-def global_id_field(type_name, id_fetcher=None):
- """
- Creates the configuration for an id field on a node, using `to_global_id` to
- construct the ID from the provided typename. The type-specific ID is fetcher
- by calling id_fetcher on the object, or if not provided, by accessing the `id`
- property on the object.
- """
- return GraphQLField(
- GraphQLNonNull(GraphQLID),
- description='The ID of an object',
- resolver=lambda obj, info, **args: to_global_id(
- type_name or info.parent_type.name,
- id_fetcher(obj, info) if id_fetcher else obj.id
- )
- )
diff --git a/graphql_relay/node/plural.py b/graphql_relay/node/plural.py
deleted file mode 100644
index 716a1eb..0000000
--- a/graphql_relay/node/plural.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from collections import OrderedDict
-
-from promise import Promise
-
-from graphql.type import (
- GraphQLArgument,
- GraphQLList,
- GraphQLNonNull,
- GraphQLField
-)
-
-
-def plural_identifying_root_field(
- arg_name, input_type, output_type, resolve_single_input, description=None):
- input_args = OrderedDict()
- input_args[arg_name] = GraphQLArgument(
- GraphQLNonNull(
- GraphQLList(
- GraphQLNonNull(input_type)
- )
- )
- )
-
- def resolver(_obj, info, **args):
- inputs = args[arg_name]
- return Promise.all([
- resolve_single_input(info, input_) for input_ in inputs
- ])
-
- return GraphQLField(
- GraphQLList(output_type),
- description=description,
- args=input_args,
- resolver=resolver
- )
diff --git a/graphql_relay/node/tests/__init__.py b/graphql_relay/node/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/graphql_relay/node/tests/test_global.py b/graphql_relay/node/tests/test_global.py
deleted file mode 100644
index 0fbc658..0000000
--- a/graphql_relay/node/tests/test_global.py
+++ /dev/null
@@ -1,143 +0,0 @@
-from collections import namedtuple
-from graphql import graphql
-from graphql.type import (
- GraphQLSchema,
- GraphQLObjectType,
- GraphQLField,
- GraphQLList,
- GraphQLInt,
- GraphQLString,
-)
-
-from graphql_relay.node.node import (
- from_global_id,
- global_id_field,
- node_definitions,
-)
-
-User = namedtuple('User', ['id', 'name'])
-Photo = namedtuple('Photo', ['photoId', 'width'])
-
-userData = {
- '1': User(id=1, name='John Doe'),
- '2': User(id=2, name='Jane Smith'),
-}
-
-photoData = {
- '1': Photo(photoId=1, width=300),
- '2': Photo(photoId=2, width=400),
-}
-
-
-def get_node(global_id, _info):
- _type, _id = from_global_id(global_id)
- if _type == 'User':
- return userData[_id]
- else:
- return photoData[_id]
-
-
-def get_node_type(obj, _info):
- if isinstance(obj, User):
- return userType
- else:
- return photoType
-
-
-node_interface, node_field = node_definitions(get_node, get_node_type)
-
-userType = GraphQLObjectType(
- 'User',
- fields=lambda: {
- 'id': global_id_field('User'),
- 'name': GraphQLField(GraphQLString),
- },
- interfaces=[node_interface]
-)
-
-photoType = GraphQLObjectType(
- 'Photo',
- fields=lambda: {
- 'id': global_id_field('Photo', lambda obj, *_: obj.photoId),
- 'width': GraphQLField(GraphQLInt),
- },
- interfaces=[node_interface]
-)
-
-queryType = GraphQLObjectType(
- 'Query',
- fields=lambda: {
- 'node': node_field,
- 'allObjects': GraphQLField(
- GraphQLList(node_interface),
- resolver=lambda _root, _info:
- [userData['1'], userData['2'], photoData['1'], photoData['2']]
- )
- }
-)
-
-schema = GraphQLSchema(
- query=queryType,
- types=[userType, photoType]
-)
-
-
-def test_gives_different_ids():
- query = '''
- {
- allObjects {
- id
- }
- }
- '''
- expected = {
- 'allObjects': [
- {
- 'id': 'VXNlcjox'
- },
- {
- 'id': 'VXNlcjoy'
- },
- {
- 'id': 'UGhvdG86MQ=='
- },
- {
- 'id': 'UGhvdG86Mg=='
- },
- ]
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_refetches_the_ids():
- query = '''
- {
- user: node(id: "VXNlcjox") {
- id
- ... on User {
- name
- }
- },
- photo: node(id: "UGhvdG86MQ==") {
- id
- ... on Photo {
- width
- }
- }
- }
- '''
- expected = {
- 'user': {
- 'id': 'VXNlcjox',
- 'name': 'John Doe'
- },
- 'photo': {
- 'id': 'UGhvdG86MQ==',
- 'width': 300
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
diff --git a/graphql_relay/node/tests/test_node.py b/graphql_relay/node/tests/test_node.py
deleted file mode 100644
index 3f47eca..0000000
--- a/graphql_relay/node/tests/test_node.py
+++ /dev/null
@@ -1,353 +0,0 @@
-from collections import namedtuple
-from graphql import graphql
-from graphql.type import (
- GraphQLSchema,
- GraphQLObjectType,
- GraphQLField,
- GraphQLNonNull,
- GraphQLInt,
- GraphQLString,
- GraphQLID,
-)
-
-from ..node import node_definitions, to_global_id, from_global_id
-
-User = namedtuple('User', ['id', 'name'])
-Photo = namedtuple('Photo', ['id', 'width'])
-
-userData = {
- '1': User(id='1', name='John Doe'),
- '2': User(id='2', name='Jane Smith'),
-}
-
-photoData = {
- '3': Photo(id='3', width=300),
- '4': Photo(id='4', width=400),
-}
-
-
-def get_node(id, info):
- assert info.schema == schema
- if id in userData:
- return userData.get(id)
- else:
- return photoData.get(id)
-
-
-def get_node_type(obj, _info):
- if obj.id in userData:
- return userType
- else:
- return photoType
-
-
-node_interface, node_field = node_definitions(get_node, get_node_type)
-
-userType = GraphQLObjectType(
- 'User',
- fields=lambda: {
- 'id': GraphQLField(GraphQLNonNull(GraphQLID)),
- 'name': GraphQLField(GraphQLString),
- },
- interfaces=[node_interface]
-)
-
-photoType = GraphQLObjectType(
- 'Photo',
- fields=lambda: {
- 'id': GraphQLField(GraphQLNonNull(GraphQLID)),
- 'width': GraphQLField(GraphQLInt),
- },
- interfaces=[node_interface]
-)
-
-queryType = GraphQLObjectType(
- 'Query',
- fields=lambda: {
- 'node': node_field,
- }
-)
-
-schema = GraphQLSchema(
- query=queryType,
- types=[userType, photoType]
-)
-
-
-def test_gets_the_correct_id_for_users():
- query = '''
- {
- node(id: "1") {
- id
- }
- }
- '''
- expected = {
- 'node': {
- 'id': '1',
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_gets_the_correct_id_for_photos():
- query = '''
- {
- node(id: "4") {
- id
- }
- }
- '''
- expected = {
- 'node': {
- 'id': '4',
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_gets_the_correct_name_for_users():
- query = '''
- {
- node(id: "1") {
- id
- ... on User {
- name
- }
- }
- }
- '''
- expected = {
- 'node': {
- 'id': '1',
- 'name': 'John Doe'
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_gets_the_correct_width_for_photos():
- query = '''
- {
- node(id: "4") {
- id
- ... on Photo {
- width
- }
- }
- }
- '''
- expected = {
- 'node': {
- 'id': '4',
- 'width': 400
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_gets_the_correct_typename_for_users():
- query = '''
- {
- node(id: "1") {
- id
- __typename
- }
- }
- '''
- expected = {
- 'node': {
- 'id': '1',
- '__typename': 'User'
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_gets_the_correct_typename_for_photos():
- query = '''
- {
- node(id: "4") {
- id
- __typename
- }
- }
- '''
- expected = {
- 'node': {
- 'id': '4',
- '__typename': 'Photo'
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_ignores_photo_fragments_on_user():
- query = '''
- {
- node(id: "1") {
- id
- ... on Photo {
- width
- }
- }
- }
- '''
- expected = {
- 'node': {
- 'id': '1',
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_returns_null_for_bad_ids():
- query = '''
- {
- node(id: "5") {
- id
- }
- }
- '''
- expected = {
- 'node': None
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_have_correct_node_interface():
- query = '''
- {
- __type(name: "Node") {
- name
- kind
- fields {
- name
- type {
- kind
- ofType {
- name
- kind
- }
- }
- }
- }
- }
- '''
- expected = {
- '__type': {
- 'name': 'Node',
- 'kind': 'INTERFACE',
- 'fields': [
- {
- 'name': 'id',
- 'type': {
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'ID',
- 'kind': 'SCALAR'
- }
- }
- }
- ]
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_has_correct_node_root_field():
- query = '''
- {
- __schema {
- queryType {
- fields {
- name
- type {
- name
- kind
- }
- args {
- name
- type {
- kind
- ofType {
- name
- kind
- }
- }
- }
- }
- }
- }
- }
- '''
- expected = {
- '__schema': {
- 'queryType': {
- 'fields': [
- {
- 'name': 'node',
- 'type': {
- 'name': 'Node',
- 'kind': 'INTERFACE'
- },
- 'args': [
- {
- 'name': 'id',
- 'type': {
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'ID',
- 'kind': 'SCALAR'
- }
- }
- }
- ]
- }
- ]
- }
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_to_global_id_converts_unicode_strings_correctly():
- my_unicode_id = u'\xfb\xf1\xf6'
- g_id = to_global_id('MyType', my_unicode_id)
- assert g_id == 'TXlUeXBlOsO7w7HDtg=='
-
- my_unicode_id = u'\u06ED'
- g_id = to_global_id('MyType', my_unicode_id)
- assert g_id == 'TXlUeXBlOtut'
-
-
-def test_from_global_id_converts_unicode_strings_correctly():
- my_unicode_id = u'\xfb\xf1\xf6'
- my_type, my_id = from_global_id('TXlUeXBlOsO7w7HDtg==')
- assert my_type == 'MyType'
- assert my_id == my_unicode_id
-
- my_unicode_id = u'\u06ED'
- my_type, my_id = from_global_id('TXlUeXBlOtut')
- assert my_type == 'MyType'
- assert my_id == my_unicode_id
diff --git a/graphql_relay/node/tests/test_plural.py b/graphql_relay/node/tests/test_plural.py
deleted file mode 100644
index a26d1b6..0000000
--- a/graphql_relay/node/tests/test_plural.py
+++ /dev/null
@@ -1,150 +0,0 @@
-from collections import namedtuple
-
-from graphql import graphql
-from graphql.type import (
- GraphQLSchema,
- GraphQLObjectType,
- GraphQLField,
- GraphQLString,
-)
-
-from graphql_relay.node.plural import plural_identifying_root_field
-
-userType = GraphQLObjectType(
- 'User',
- fields=lambda: {
- 'username': GraphQLField(GraphQLString),
- 'url': GraphQLField(GraphQLString),
- }
-)
-User = namedtuple('User', ['username', 'url'])
-
-
-queryType = GraphQLObjectType(
- 'Query',
- fields=lambda: {
- 'usernames': plural_identifying_root_field(
- 'usernames',
- description='Map from a username to the user',
- input_type=GraphQLString,
- output_type=userType,
- resolve_single_input=lambda info, username: User(
- username=username,
- url='www.facebook.com/' + username + '?lang=' + info.root_value.lang
- )
- )
- }
-)
-
-
-class RootValue:
- lang = 'en'
-
-
-schema = GraphQLSchema(query=queryType)
-
-
-def test_allows_fetching():
- query = '''
- {
- usernames(usernames:["dschafer", "leebyron", "schrockn"]) {
- username
- url
- }
- }
- '''
- expected = {
- 'usernames': [
- {
- 'username': 'dschafer',
- 'url': 'www.facebook.com/dschafer?lang=en'
- },
- {
- 'username': 'leebyron',
- 'url': 'www.facebook.com/leebyron?lang=en'
- },
- {
- 'username': 'schrockn',
- 'url': 'www.facebook.com/schrockn?lang=en'
- },
- ]
- }
- result = graphql(schema, query, root=RootValue())
- assert not result.errors
- assert result.data == expected
-
-
-def test_correctly_introspects():
- query = '''
- {
- __schema {
- queryType {
- fields {
- name
- args {
- name
- type {
- kind
- ofType {
- kind
- ofType {
- kind
- ofType {
- name
- kind
- }
- }
- }
- }
- }
- type {
- kind
- ofType {
- name
- kind
- }
- }
- }
- }
- }
- }
- '''
- expected = {
- '__schema': {
- 'queryType': {
- 'fields': [
- {
- 'name': 'usernames',
- 'args': [
- {
- 'name': 'usernames',
- 'type': {
- 'kind': 'NON_NULL',
- 'ofType': {
- 'kind': 'LIST',
- 'ofType': {
- 'kind': 'NON_NULL',
- 'ofType': {
- 'name': 'String',
- 'kind': 'SCALAR',
- }
- }
- }
- }
- }
- ],
- 'type': {
- 'kind': 'LIST',
- 'ofType': {
- 'name': 'User',
- 'kind': 'OBJECT',
- }
- }
- }
- ]
- }
- }
- }
- result = graphql(schema, query)
- assert not result.errors
- assert result.data == expected
diff --git a/graphql_relay/tests/__init__.py b/graphql_relay/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/graphql_relay/tests/test_utils.py b/graphql_relay/tests/test_utils.py
deleted file mode 100644
index 8897239..0000000
--- a/graphql_relay/tests/test_utils.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import base64
-
-from .. import utils
-
-
-def test_base64_encode_unicode_strings_correctly():
- my_unicode = u'\xfb\xf1\xf6'
- my_base64 = utils.base64(my_unicode)
- assert my_base64 == base64.b64encode(my_unicode.encode('utf-8')).decode('utf-8')
-
- my_unicode = u'\u06ED'
- my_base64 = utils.base64(my_unicode)
- assert my_base64 == base64.b64encode(my_unicode.encode('utf-8')).decode('utf-8')
-
-
-def test_base64_encode_strings_correctly():
- my_string = 'abc'
- my_base64 = utils.base64(my_string)
- assert my_base64 == base64.b64encode(my_string.encode('utf-8')).decode('utf-8')
-
-
-def test_unbase64_decodes_unicode_strings_correctly():
- my_unicode = u'\xfb\xf1\xf6'
- my_converted_unicode = utils.unbase64(utils.base64(my_unicode))
- assert my_unicode == my_converted_unicode
-
-
-def test_unbase64_decodes_strings_correctly():
- my_string = 'abc'
- my_converted_string = utils.unbase64(utils.base64(my_string))
- assert my_string == my_converted_string
diff --git a/graphql_relay/utils.py b/graphql_relay/utils.py
deleted file mode 100644
index 1cba2c9..0000000
--- a/graphql_relay/utils.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from base64 import b64encode as _base64, b64decode as _unbase64
-
-from six import string_types
-
-
-def base64(s):
- return _base64(s.encode('utf-8')).decode('utf-8')
-
-
-def unbase64(s):
- return _unbase64(s).decode('utf-8')
-
-
-def is_str(s):
- return isinstance(s, string_types)
-
-
-def resolve_maybe_thunk(f):
- if callable(f):
- return f()
- return f
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..5070bcf
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,916 @@
+[[package]]
+name = "appdirs"
+version = "1.4.4"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "atomicwrites"
+version = "1.4.0"
+description = "Atomic file writes."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "attrs"
+version = "21.4.0"
+description = "Classes Without Boilerplate"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.extras]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
+docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
+
+[[package]]
+name = "black"
+version = "20.8b1"
+description = "The uncompromising code formatter."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+appdirs = "*"
+click = ">=7.1.2"
+dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""}
+mypy-extensions = ">=0.4.3"
+pathspec = ">=0.6,<1"
+regex = ">=2020.1.8"
+toml = ">=0.10.1"
+typed-ast = ">=1.4.0"
+typing-extensions = ">=3.7.4"
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
+
+[[package]]
+name = "black"
+version = "22.3.0"
+description = "The uncompromising code formatter."
+category = "dev"
+optional = false
+python-versions = ">=3.6.2"
+
+[package.dependencies]
+click = ">=8.0.0"
+dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""}
+mypy-extensions = ">=0.4.3"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "build"
+version = "0.7.0"
+description = "A simple, correct PEP517 package builder"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+colorama = {version = "*", markers = "os_name == \"nt\""}
+importlib-metadata = {version = ">=0.22", markers = "python_version < \"3.8\""}
+packaging = ">=19.0"
+pep517 = ">=0.9.1"
+tomli = ">=1.0.0"
+
+[package.extras]
+docs = ["furo (>=2020.11.19b18)", "sphinx (>=3.0,<4.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"]
+test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"]
+typing = ["importlib-metadata (>=4.6.4)", "mypy (==0.910)", "typing-extensions (>=3.7.4.3)"]
+virtualenv = ["virtualenv (>=20.0.35)"]
+
+[[package]]
+name = "bump2version"
+version = "1.0.1"
+description = "Version-bump your software with a single command!"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[[package]]
+name = "check-manifest"
+version = "0.48"
+description = "Check MANIFEST.in in a Python source package for completeness"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+build = ">=0.1"
+tomli = "*"
+
+[package.extras]
+test = ["mock (>=3.0.0)", "pytest"]
+
+[[package]]
+name = "click"
+version = "8.0.4"
+description = "Composable command line interface toolkit"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.4"
+description = "Cross-platform colored terminal text."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[[package]]
+name = "coverage"
+version = "6.2"
+description = "Code coverage measurement for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "dataclasses"
+version = "0.8"
+description = "A backport of the dataclasses module for Python 3.6"
+category = "dev"
+optional = false
+python-versions = ">=3.6, <3.7"
+
+[[package]]
+name = "distlib"
+version = "0.3.4"
+description = "Distribution utilities"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "filelock"
+version = "3.4.1"
+description = "A platform independent file lock."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
+testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
+
+[[package]]
+name = "flake8"
+version = "4.0.1"
+description = "the modular source code checker: pep8 pyflakes and co"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""}
+mccabe = ">=0.6.0,<0.7.0"
+pycodestyle = ">=2.8.0,<2.9.0"
+pyflakes = ">=2.4.0,<2.5.0"
+
+[[package]]
+name = "graphql-core"
+version = "3.2.1"
+description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL."
+category = "main"
+optional = false
+python-versions = ">=3.6,<4"
+
+[[package]]
+name = "importlib-metadata"
+version = "4.2.0"
+description = "Read metadata from Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
+
+[[package]]
+name = "importlib-resources"
+version = "5.4.0"
+description = "Read resources from Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"]
+
+[[package]]
+name = "iniconfig"
+version = "1.1.1"
+description = "iniconfig: brain-dead simple config-ini parsing"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "mccabe"
+version = "0.6.1"
+description = "McCabe checker, plugin for flake8"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "mypy"
+version = "0.942"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3"
+tomli = ">=1.1.0"
+typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "packaging"
+version = "21.3"
+description = "Core utilities for Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
+
+[[package]]
+name = "pathspec"
+version = "0.9.0"
+description = "Utility library for gitignore style pattern matching of file paths."
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[[package]]
+name = "pep517"
+version = "0.12.0"
+description = "Wrappers to build Python packages using PEP 517 hooks"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+importlib_metadata = {version = "*", markers = "python_version < \"3.8\""}
+tomli = {version = ">=1.1.0", markers = "python_version >= \"3.6\""}
+zipp = {version = "*", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "platformdirs"
+version = "2.4.0"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"]
+test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
+
+[[package]]
+name = "pluggy"
+version = "1.0.0"
+description = "plugin and hook calling mechanisms for python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "py"
+version = "1.11.0"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[[package]]
+name = "pycodestyle"
+version = "2.8.0"
+description = "Python style guide checker"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[[package]]
+name = "pyflakes"
+version = "2.4.0"
+description = "passive checker of Python programs"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "pyparsing"
+version = "3.0.7"
+description = "Python parsing module"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
+
+[[package]]
+name = "pytest"
+version = "6.2.5"
+description = "pytest: simple powerful testing with Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
+attrs = ">=19.2.0"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+py = ">=1.8.2"
+toml = "*"
+
+[package.extras]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.16.0"
+description = "Pytest support for asyncio."
+category = "dev"
+optional = false
+python-versions = ">= 3.6"
+
+[package.dependencies]
+pytest = ">=5.4.0"
+
+[package.extras]
+testing = ["coverage", "hypothesis (>=5.7.1)"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.18.3"
+description = "Pytest support for asyncio"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+pytest = ">=6.1.0"
+typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""}
+
+[package.extras]
+testing = ["coverage (==6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (==0.931)", "pytest-trio (>=0.7.0)"]
+
+[[package]]
+name = "pytest-cov"
+version = "3.0.0"
+description = "Pytest plugin for measuring coverage."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
+
+[[package]]
+name = "pytest-describe"
+version = "2.0.1"
+description = "Describe-style plugin for pytest"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+pytest = ">=4.0.0"
+
+[[package]]
+name = "regex"
+version = "2022.3.15"
+description = "Alternative regular expression module, to replace re."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+category = "dev"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "tomli"
+version = "1.2.3"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "tox"
+version = "3.25.0"
+description = "tox is a generic virtualenv management and test command line tool"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""}
+filelock = ">=3.0.0"
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+packaging = ">=14"
+pluggy = ">=0.12.0"
+py = ">=1.4.17"
+six = ">=1.14.0"
+toml = ">=0.9.4"
+virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7"
+
+[package.extras]
+docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"]
+testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"]
+
+[[package]]
+name = "typed-ast"
+version = "1.5.2"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "typing-extensions"
+version = "4.1.1"
+description = "Backported and Experimental Type Hints for Python 3.6+"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "virtualenv"
+version = "20.14.1"
+description = "Virtual Python Environment builder"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+distlib = ">=0.3.1,<1"
+filelock = ">=3.2,<4"
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""}
+platformdirs = ">=2,<3"
+six = ">=1.9.0,<2"
+
+[package.extras]
+docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
+testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
+
+[[package]]
+name = "zipp"
+version = "3.6.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
+
+[metadata]
+lock-version = "1.1"
+python-versions = "^3.6"
+content-hash = "a697c403fc34533863e70972dfabd69751fc6a8acece291d4b40d9d631312cc0"
+
+[metadata.files]
+appdirs = [
+ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
+ {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
+]
+atomicwrites = [
+ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
+ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
+]
+attrs = [
+ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
+ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
+]
+black = [
+ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
+ {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"},
+ {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"},
+ {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"},
+ {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"},
+ {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"},
+ {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"},
+ {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"},
+ {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"},
+ {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"},
+ {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"},
+ {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"},
+ {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"},
+ {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"},
+ {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"},
+ {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"},
+ {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"},
+ {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"},
+ {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"},
+ {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"},
+ {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"},
+ {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"},
+ {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"},
+ {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"},
+]
+build = [
+ {file = "build-0.7.0-py3-none-any.whl", hash = "sha256:21b7ebbd1b22499c4dac536abc7606696ea4d909fd755e00f09f3c0f2c05e3c8"},
+ {file = "build-0.7.0.tar.gz", hash = "sha256:1aaadcd69338252ade4f7ec1265e1a19184bf916d84c9b7df095f423948cb89f"},
+]
+bump2version = [
+ {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"},
+ {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"},
+]
+check-manifest = [
+ {file = "check-manifest-0.48.tar.gz", hash = "sha256:3b575f1dade7beb3078ef4bf33a94519834457c7281dbc726b15c5466b55c657"},
+ {file = "check_manifest-0.48-py3-none-any.whl", hash = "sha256:b1923685f98c1c2468601a1a7bed655db549a25d43c583caded3860ad8308f8c"},
+]
+click = [
+ {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"},
+ {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"},
+]
+colorama = [
+ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
+ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
+]
+coverage = [
+ {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"},
+ {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"},
+ {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"},
+ {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"},
+ {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"},
+ {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"},
+ {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"},
+ {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"},
+ {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"},
+ {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"},
+ {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"},
+ {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"},
+ {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"},
+ {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"},
+ {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"},
+ {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"},
+ {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"},
+ {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"},
+ {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"},
+ {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"},
+ {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"},
+ {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"},
+ {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"},
+ {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"},
+ {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"},
+ {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"},
+ {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"},
+ {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"},
+ {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"},
+ {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"},
+ {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"},
+ {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"},
+ {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"},
+ {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"},
+ {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"},
+ {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"},
+ {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"},
+ {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"},
+ {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"},
+ {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"},
+ {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"},
+ {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"},
+ {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"},
+ {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"},
+ {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"},
+ {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"},
+ {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"},
+]
+dataclasses = [
+ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"},
+ {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"},
+]
+distlib = [
+ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
+ {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
+]
+filelock = [
+ {file = "filelock-3.4.1-py3-none-any.whl", hash = "sha256:a4bc51381e01502a30e9f06dd4fa19a1712eab852b6fb0f84fd7cce0793d8ca3"},
+ {file = "filelock-3.4.1.tar.gz", hash = "sha256:0f12f552b42b5bf60dba233710bf71337d35494fc8bdd4fd6d9f6d082ad45e06"},
+]
+flake8 = [
+ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
+ {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
+]
+graphql-core = [
+ {file = "graphql-core-3.2.1.tar.gz", hash = "sha256:9d1bf141427b7d54be944587c8349df791ce60ade2e3cccaf9c56368c133c201"},
+ {file = "graphql_core-3.2.1-py3-none-any.whl", hash = "sha256:f83c658e4968998eed1923a2e3e3eddd347e005ac0315fbb7ca4d70ea9156323"},
+]
+importlib-metadata = [
+ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"},
+ {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"},
+]
+importlib-resources = [
+ {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"},
+ {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"},
+]
+iniconfig = [
+ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
+ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
+]
+mccabe = [
+ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
+ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
+]
+mypy = [
+ {file = "mypy-0.942-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5bf44840fb43ac4074636fd47ee476d73f0039f4f54e86d7265077dc199be24d"},
+ {file = "mypy-0.942-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dcd955f36e0180258a96f880348fbca54ce092b40fbb4b37372ae3b25a0b0a46"},
+ {file = "mypy-0.942-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6776e5fa22381cc761df53e7496a805801c1a751b27b99a9ff2f0ca848c7eca0"},
+ {file = "mypy-0.942-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:edf7237137a1a9330046dbb14796963d734dd740a98d5e144a3eb1d267f5f9ee"},
+ {file = "mypy-0.942-cp310-cp310-win_amd64.whl", hash = "sha256:64235137edc16bee6f095aba73be5334677d6f6bdb7fa03cfab90164fa294a17"},
+ {file = "mypy-0.942-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b840cfe89c4ab6386c40300689cd8645fc8d2d5f20101c7f8bd23d15fca14904"},
+ {file = "mypy-0.942-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b184db8c618c43c3a31b32ff00cd28195d39e9c24e7c3b401f3db7f6e5767f5"},
+ {file = "mypy-0.942-cp36-cp36m-win_amd64.whl", hash = "sha256:1a0459c333f00e6a11cbf6b468b870c2b99a906cb72d6eadf3d1d95d38c9352c"},
+ {file = "mypy-0.942-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c3e497588afccfa4334a9986b56f703e75793133c4be3a02d06a3df16b67a58"},
+ {file = "mypy-0.942-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f6ad963172152e112b87cc7ec103ba0f2db2f1cd8997237827c052a3903eaa6"},
+ {file = "mypy-0.942-cp37-cp37m-win_amd64.whl", hash = "sha256:0e2dd88410937423fba18e57147dd07cd8381291b93d5b1984626f173a26543e"},
+ {file = "mypy-0.942-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:246e1aa127d5b78488a4a0594bd95f6d6fb9d63cf08a66dafbff8595d8891f67"},
+ {file = "mypy-0.942-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d8d3ba77e56b84cd47a8ee45b62c84b6d80d32383928fe2548c9a124ea0a725c"},
+ {file = "mypy-0.942-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2bc249409a7168d37c658e062e1ab5173300984a2dada2589638568ddc1db02b"},
+ {file = "mypy-0.942-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9521c1265ccaaa1791d2c13582f06facf815f426cd8b07c3a485f486a8ffc1f3"},
+ {file = "mypy-0.942-cp38-cp38-win_amd64.whl", hash = "sha256:e865fec858d75b78b4d63266c9aff770ecb6a39dfb6d6b56c47f7f8aba6baba8"},
+ {file = "mypy-0.942-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ce34a118d1a898f47def970a2042b8af6bdcc01546454726c7dd2171aa6dfca"},
+ {file = "mypy-0.942-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:10daab80bc40f84e3f087d896cdb53dc811a9f04eae4b3f95779c26edee89d16"},
+ {file = "mypy-0.942-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3841b5433ff936bff2f4dc8d54cf2cdbfea5d8e88cedfac45c161368e5770ba6"},
+ {file = "mypy-0.942-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f7106cbf9cc2f403693bf50ed7c9fa5bb3dfa9007b240db3c910929abe2a322"},
+ {file = "mypy-0.942-cp39-cp39-win_amd64.whl", hash = "sha256:7742d2c4e46bb5017b51c810283a6a389296cda03df805a4f7869a6f41246534"},
+ {file = "mypy-0.942-py3-none-any.whl", hash = "sha256:a1b383fe99678d7402754fe90448d4037f9512ce70c21f8aee3b8bf48ffc51db"},
+ {file = "mypy-0.942.tar.gz", hash = "sha256:17e44649fec92e9f82102b48a3bf7b4a5510ad0cd22fa21a104826b5db4903e2"},
+]
+mypy-extensions = [
+ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
+packaging = [
+ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
+]
+pathspec = [
+ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
+ {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
+]
+pep517 = [
+ {file = "pep517-0.12.0-py2.py3-none-any.whl", hash = "sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161"},
+ {file = "pep517-0.12.0.tar.gz", hash = "sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0"},
+]
+platformdirs = [
+ {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"},
+ {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"},
+]
+pluggy = [
+ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
+ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+]
+py = [
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
+pycodestyle = [
+ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
+ {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
+]
+pyflakes = [
+ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
+ {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
+]
+pyparsing = [
+ {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
+ {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
+]
+pytest = [
+ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
+ {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
+]
+pytest-asyncio = [
+ {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"},
+ {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"},
+ {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"},
+ {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"},
+ {file = "pytest_asyncio-0.18.3-py3-none-any.whl", hash = "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84"},
+]
+pytest-cov = [
+ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
+ {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"},
+]
+pytest-describe = [
+ {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"},
+ {file = "pytest_describe-2.0.1-py3-none-any.whl", hash = "sha256:ea347838bdf774b498ee7cb4a0b802a40be89e667a399fb63d860e3223bf4183"},
+]
+regex = [
+ {file = "regex-2022.3.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:42eb13b93765c6698a5ab3bcd318d8c39bb42e5fa8a7fcf7d8d98923f3babdb1"},
+ {file = "regex-2022.3.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9beb03ff6fe509d6455971c2489dceb31687b38781206bcec8e68bdfcf5f1db2"},
+ {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a5a1fdc9f148a8827d55b05425801acebeeefc9e86065c7ac8b8cc740a91ff"},
+ {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb374a2a4dba7c4be0b19dc7b1adc50e6c2c26c3369ac629f50f3c198f3743a4"},
+ {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c33ce0c665dd325200209340a88438ba7a470bd5f09f7424e520e1a3ff835b52"},
+ {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04c09b9651fa814eeeb38e029dc1ae83149203e4eeb94e52bb868fadf64852bc"},
+ {file = "regex-2022.3.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab5d89cfaf71807da93c131bb7a19c3e19eaefd613d14f3bce4e97de830b15df"},
+ {file = "regex-2022.3.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e2630ae470d6a9f8e4967388c1eda4762706f5750ecf387785e0df63a4cc5af"},
+ {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:df037c01d68d1958dad3463e2881d3638a0d6693483f58ad41001aa53a83fcea"},
+ {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:940570c1a305bac10e8b2bc934b85a7709c649317dd16520471e85660275083a"},
+ {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7f63877c87552992894ea1444378b9c3a1d80819880ae226bb30b04789c0828c"},
+ {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3e265b388cc80c7c9c01bb4f26c9e536c40b2c05b7231fbb347381a2e1c8bf43"},
+ {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:058054c7a54428d5c3e3739ac1e363dc9347d15e64833817797dc4f01fb94bb8"},
+ {file = "regex-2022.3.15-cp310-cp310-win32.whl", hash = "sha256:76435a92e444e5b8f346aed76801db1c1e5176c4c7e17daba074fbb46cb8d783"},
+ {file = "regex-2022.3.15-cp310-cp310-win_amd64.whl", hash = "sha256:174d964bc683b1e8b0970e1325f75e6242786a92a22cedb2a6ec3e4ae25358bd"},
+ {file = "regex-2022.3.15-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6e1d8ed9e61f37881c8db383a124829a6e8114a69bd3377a25aecaeb9b3538f8"},
+ {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b52771f05cff7517f7067fef19ffe545b1f05959e440d42247a17cd9bddae11b"},
+ {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:673f5a393d603c34477dbad70db30025ccd23996a2d0916e942aac91cc42b31a"},
+ {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8923e1c5231549fee78ff9b2914fad25f2e3517572bb34bfaa3aea682a758683"},
+ {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764e66a0e382829f6ad3bbce0987153080a511c19eb3d2f8ead3f766d14433ac"},
+ {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd00859291658fe1fda48a99559fb34da891c50385b0bfb35b808f98956ef1e7"},
+ {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa2ce79f3889720b46e0aaba338148a1069aea55fda2c29e0626b4db20d9fcb7"},
+ {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:34bb30c095342797608727baf5c8aa122406aa5edfa12107b8e08eb432d4c5d7"},
+ {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:25ecb1dffc5e409ca42f01a2b2437f93024ff1612c1e7983bad9ee191a5e8828"},
+ {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:aa5eedfc2461c16a092a2fabc5895f159915f25731740c9152a1b00f4bcf629a"},
+ {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:7d1a6e403ac8f1d91d8f51c441c3f99367488ed822bda2b40836690d5d0059f5"},
+ {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3e4d710ff6539026e49f15a3797c6b1053573c2b65210373ef0eec24480b900b"},
+ {file = "regex-2022.3.15-cp36-cp36m-win32.whl", hash = "sha256:0100f0ded953b6b17f18207907159ba9be3159649ad2d9b15535a74de70359d3"},
+ {file = "regex-2022.3.15-cp36-cp36m-win_amd64.whl", hash = "sha256:f320c070dea3f20c11213e56dbbd7294c05743417cde01392148964b7bc2d31a"},
+ {file = "regex-2022.3.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fc8c7958d14e8270171b3d72792b609c057ec0fa17d507729835b5cff6b7f69a"},
+ {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ca6dcd17f537e9f3793cdde20ac6076af51b2bd8ad5fe69fa54373b17b48d3c"},
+ {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0214ff6dff1b5a4b4740cfe6e47f2c4c92ba2938fca7abbea1359036305c132f"},
+ {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a98ae493e4e80b3ded6503ff087a8492db058e9c68de371ac3df78e88360b374"},
+ {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b1cc70e31aacc152a12b39245974c8fccf313187eead559ee5966d50e1b5817"},
+ {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4829db3737480a9d5bfb1c0320c4ee13736f555f53a056aacc874f140e98f64"},
+ {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:303b15a3d32bf5fe5a73288c316bac5807587f193ceee4eb6d96ee38663789fa"},
+ {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:dc7b7c16a519d924c50876fb152af661a20749dcbf653c8759e715c1a7a95b18"},
+ {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ce3057777a14a9a1399b81eca6a6bfc9612047811234398b84c54aeff6d536ea"},
+ {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:48081b6bff550fe10bcc20c01cf6c83dbca2ccf74eeacbfac240264775fd7ecf"},
+ {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dcbb7665a9db9f8d7642171152c45da60e16c4f706191d66a1dc47ec9f820aed"},
+ {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c155a1a80c5e7a8fa1d9bb1bf3c8a953532b53ab1196092749bafb9d3a7cbb60"},
+ {file = "regex-2022.3.15-cp37-cp37m-win32.whl", hash = "sha256:04b5ee2b6d29b4a99d38a6469aa1db65bb79d283186e8460542c517da195a8f6"},
+ {file = "regex-2022.3.15-cp37-cp37m-win_amd64.whl", hash = "sha256:797437e6024dc1589163675ae82f303103063a0a580c6fd8d0b9a0a6708da29e"},
+ {file = "regex-2022.3.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8afcd1c2297bc989dceaa0379ba15a6df16da69493635e53431d2d0c30356086"},
+ {file = "regex-2022.3.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0066a6631c92774391f2ea0f90268f0d82fffe39cb946f0f9c6b382a1c61a5e5"},
+ {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8248f19a878c72d8c0a785a2cd45d69432e443c9f10ab924c29adda77b324ae"},
+ {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d1f3ea0d1924feb4cf6afb2699259f658a08ac6f8f3a4a806661c2dfcd66db1"},
+ {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:794a6bc66c43db8ed06698fc32aaeaac5c4812d9f825e9589e56f311da7becd9"},
+ {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d1445824944e642ffa54c4f512da17a953699c563a356d8b8cbdad26d3b7598"},
+ {file = "regex-2022.3.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f553a1190ae6cd26e553a79f6b6cfba7b8f304da2071052fa33469da075ea625"},
+ {file = "regex-2022.3.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:75a5e6ce18982f0713c4bac0704bf3f65eed9b277edd3fb9d2b0ff1815943327"},
+ {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f16cf7e4e1bf88fecf7f41da4061f181a6170e179d956420f84e700fb8a3fd6b"},
+ {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dad3991f0678facca1a0831ec1ddece2eb4d1dd0f5150acb9440f73a3b863907"},
+ {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:491fc754428514750ab21c2d294486223ce7385446f2c2f5df87ddbed32979ae"},
+ {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6504c22c173bb74075d7479852356bb7ca80e28c8e548d4d630a104f231e04fb"},
+ {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c913cf573d1da0b34c9001a94977273b5ee2fe4cb222a5d5b320f3a9d1a835"},
+ {file = "regex-2022.3.15-cp38-cp38-win32.whl", hash = "sha256:029e9e7e0d4d7c3446aa92474cbb07dafb0b2ef1d5ca8365f059998c010600e6"},
+ {file = "regex-2022.3.15-cp38-cp38-win_amd64.whl", hash = "sha256:947a8525c0a95ba8dc873191f9017d1b1e3024d4dc757f694e0af3026e34044a"},
+ {file = "regex-2022.3.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:591d4fba554f24bfa0421ba040cd199210a24301f923ed4b628e1e15a1001ff4"},
+ {file = "regex-2022.3.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9809404528a999cf02a400ee5677c81959bc5cb938fdc696b62eb40214e3632"},
+ {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f08a7e4d62ea2a45557f561eea87c907222575ca2134180b6974f8ac81e24f06"},
+ {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a86cac984da35377ca9ac5e2e0589bd11b3aebb61801204bd99c41fac516f0d"},
+ {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:286908cbe86b1a0240a867aecfe26a439b16a1f585d2de133540549831f8e774"},
+ {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b7494df3fdcc95a1f76cf134d00b54962dd83189520fd35b8fcd474c0aa616d"},
+ {file = "regex-2022.3.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b1ceede92400b3acfebc1425937454aaf2c62cd5261a3fabd560c61e74f6da3"},
+ {file = "regex-2022.3.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0317eb6331146c524751354ebef76a7a531853d7207a4d760dfb5f553137a2a4"},
+ {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c144405220c5ad3f5deab4c77f3e80d52e83804a6b48b6bed3d81a9a0238e4c"},
+ {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5b2e24f3ae03af3d8e8e6d824c891fea0ca9035c5d06ac194a2700373861a15c"},
+ {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f2c53f3af011393ab5ed9ab640fa0876757498aac188f782a0c620e33faa2a3d"},
+ {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:060f9066d2177905203516c62c8ea0066c16c7342971d54204d4e51b13dfbe2e"},
+ {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:530a3a16e57bd3ea0dff5ec2695c09632c9d6c549f5869d6cf639f5f7153fb9c"},
+ {file = "regex-2022.3.15-cp39-cp39-win32.whl", hash = "sha256:78ce90c50d0ec970bd0002462430e00d1ecfd1255218d52d08b3a143fe4bde18"},
+ {file = "regex-2022.3.15-cp39-cp39-win_amd64.whl", hash = "sha256:c5adc854764732dbd95a713f2e6c3e914e17f2ccdc331b9ecb777484c31f73b6"},
+ {file = "regex-2022.3.15.tar.gz", hash = "sha256:0a7b75cc7bb4cc0334380053e4671c560e31272c9d2d5a6c4b8e9ae2c9bd0f82"},
+]
+six = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+toml = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+tomli = [
+ {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"},
+ {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"},
+]
+tox = [
+ {file = "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"},
+ {file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"},
+]
+typed-ast = [
+ {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"},
+ {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"},
+ {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"},
+ {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"},
+ {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"},
+ {file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"},
+ {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"},
+ {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"},
+ {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"},
+ {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"},
+ {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"},
+ {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"},
+ {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"},
+ {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"},
+ {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"},
+ {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"},
+ {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"},
+ {file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"},
+ {file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"},
+ {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"},
+ {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"},
+ {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"},
+ {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"},
+ {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"},
+]
+typing-extensions = [
+ {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
+ {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"},
+]
+virtualenv = [
+ {file = "virtualenv-20.14.1-py2.py3-none-any.whl", hash = "sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a"},
+ {file = "virtualenv-20.14.1.tar.gz", hash = "sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5"},
+]
+zipp = [
+ {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"},
+ {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"},
+]
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..d573d19
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,71 @@
+[tool.poetry]
+name = "graphql-relay"
+version = "3.2.0"
+description = """
+Relay library for graphql-core"""
+license="MIT"
+authors = [
+ "Syrus Akbary <me@syrusakbary.com>",
+ "Christoph Zwerschke <cito@online.de>"
+]
+readme = "README.md"
+homepage = "https://github.com/graphql-python/graphql-relay-py"
+repository = "https://github.com/graphql-python/graphql-relay-py"
+documentation = "https://github.com/graphql-python/graphql-relay-py"
+keywords = ["graphql", "relay", "api"]
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Topic :: Software Development :: Libraries",
+ "License :: OSI Approved :: MIT License",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10"
+]
+packages = [
+ { include = "graphql_relay", from = "src" },
+ { include = "tests", format = "sdist" },
+ { include = '.bumpversion.cfg', format = "sdist" },
+ { include = '.coveragerc', format = "sdist" },
+ { include = '.editorconfig', format = "sdist" },
+ { include = '.flake8', format = "sdist" },
+ { include = '.mypy.ini', format = "sdist" },
+ { include = 'poetry.lock', format = "sdist" },
+ { include = 'tox.ini', format = "sdist" },
+ { include = 'setup.cfg', format = "sdist" },
+ { include = 'CODEOWNERS', format = "sdist" },
+ { include = 'MANIFEST.in', format = "sdist" }
+]
+
+[tool.poetry.dependencies]
+python = "^3.6"
+graphql-core = "~3.2"
+typing-extensions = { version = "^4.1", python = "<3.8" }
+
+[tool.poetry.dev-dependencies]
+pytest = "^6.2"
+pytest-asyncio = [
+ {version=">=0.18,<1", python = ">=3.7" },
+ {version=">=0.16,<0.17", python = "<3.7" },
+]
+pytest-cov = "^3.0"
+pytest-describe = "^2.0"
+black = [
+ {version = "22.3.0", python = ">=3.6.2"},
+ {version = "20.8b1", python = "<3.6.2"}
+]
+flake8 = "^4.0"
+mypy = "0.942"
+check-manifest = ">=0.48,<1"
+bump2version = ">=1.0,<2"
+tox = "^3.24"
+
+[tool.black]
+target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
+
+[build-system]
+requires = ["poetry_core>=1,<2", "setuptools>=59,<70"]
+build-backend = "poetry.core.masonry.api"
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..47cbe3a
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,17 @@
+[bdist_wheel]
+python-tag = py3
+
+[aliases]
+test = pytest
+
+[check-manifest]
+ignore =
+ .pyup.yml
+
+[tool:pytest]
+# Deactivate default name pattern for test classes (we use pytest_describe).
+python_classes = PyTest*
+# Handle all async fixtures and tests automatically by asyncio
+asyncio_mode = auto
+# Ignore config options not (yet) available in older Python versions.
+filterwarnings = ignore::pytest.PytestConfigWarning
diff --git a/setup.py b/setup.py
index 34a9bb3..2fc5171 100644
--- a/setup.py
+++ b/setup.py
@@ -1,69 +1,45 @@
-import sys
-
+from re import search
from setuptools import setup, find_packages
-from setuptools.command.test import test as TestCommand
-
-
-class PyTest(TestCommand):
- user_options = [('pytest-args=', 'a', "Arguments to pass to pytest")]
-
- def initialize_options(self):
- TestCommand.initialize_options(self)
- self.pytest_args = ""
- def run_tests(self):
- import shlex
-
- # import here, cause outside the eggs aren't loaded
- import pytest
-
- errno = pytest.main(shlex.split(self.pytest_args))
- sys.exit(errno)
+with open("src/graphql_relay/version.py") as version_file:
+ version = search('version = "(.*)"', version_file.read()).group(1)
+with open("README.md") as readme_file:
+ readme = readme_file.read()
setup(
- name='graphql-relay',
- version='2.0.1',
-
- description='Relay implementation for Python',
- long_description=open('README.md').read(),
+ name="graphql-relay",
+ version=version,
+ description="Relay library for graphql-core",
+ long_description=readme,
long_description_content_type="text/markdown",
-
- url='https://github.com/graphql-python/graphql-relay-py',
-
- author='Syrus Akbary',
- author_email='me@syrusakbary.com',
-
- license='MIT',
-
+ keywords="graphql relay api",
+ url="https://github.com/graphql-python/graphql-relay-py",
+ author="Syrus Akbary",
+ author_email="me@syrusakbary.com",
+ license="MIT",
classifiers=[
- 'Development Status :: 5 - Production/Stable',
- 'Intended Audience :: Developers',
- 'Topic :: Software Development :: Libraries',
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Topic :: Software Development :: Libraries",
"License :: OSI Approved :: MIT License",
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
- 'Programming Language :: Python :: 3.7',
- 'Programming Language :: Python :: 3.8',
- 'Programming Language :: Python :: Implementation :: PyPy',
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: Implementation :: PyPy",
],
-
- keywords='api graphql protocol rest relay',
-
- packages=find_packages(exclude=['tests']),
-
install_requires=[
- 'six>=1.12',
- 'graphql-core>=2.2,<3',
- 'promise>=2.2,<3'
+ "graphql-core>=3.2,<3.3",
+ "typing-extensions>=4.1,<5; python_version < '3.8'",
],
- tests_require=['pytest>=4.6,<5', 'pytest-cov>=2.7,<3'],
- extras_require={
- },
-
- cmdclass={'test': PyTest},
+ python_requires=">=3.6,<4",
+ packages=find_packages("src"),
+ package_dir={"": "src"},
+ # PEP-561: https://www.python.org/dev/peps/pep-0561/
+ package_data={"graphql_relay": ["py.typed"]},
+ include_package_data=True,
+ zip_safe=False,
)
diff --git a/src/graphql_relay/__init__.py b/src/graphql_relay/__init__.py
new file mode 100644
index 0000000..bab0672
--- /dev/null
+++ b/src/graphql_relay/__init__.py
@@ -0,0 +1,100 @@
+"""The graphql_relay package"""
+
+# The graphql-relay and graphql-relay-js version info
+from .version import version, version_info, version_js, version_info_js
+
+# Types and helpers for creating connection types in the schema
+from .connection.connection import (
+ backward_connection_args,
+ connection_args,
+ connection_definitions,
+ forward_connection_args,
+ page_info_type,
+ Connection,
+ ConnectionArguments,
+ ConnectionConstructor,
+ ConnectionCursor,
+ ConnectionType,
+ Edge,
+ EdgeConstructor,
+ EdgeType,
+ GraphQLConnectionDefinitions,
+ PageInfo,
+ PageInfoConstructor,
+ PageInfoType,
+)
+
+# Helpers for creating connections from arrays
+from .connection.array_connection import (
+ connection_from_array,
+ connection_from_array_slice,
+ cursor_for_object_in_connection,
+ cursor_to_offset,
+ get_offset_with_default,
+ offset_to_cursor,
+ SizedSliceable,
+)
+
+# Helper for creating mutations with client mutation IDs
+from .mutation.mutation import (
+ mutation_with_client_mutation_id,
+ MutationFn,
+ MutationFnWithoutArgs,
+ NullResult,
+)
+
+# Helper for creating node definitions
+from .node.node import node_definitions, GraphQLNodeDefinitions
+
+# Helper for creating plural identifying root fields
+from .node.plural import plural_identifying_root_field
+
+# Utilities for creating global IDs in systems that don't have them
+from .node.node import from_global_id, global_id_field, to_global_id, ResolvedGlobalId
+
+__version__ = version
+__version_info__ = version_info
+__version_js__ = version_js
+__version_info_js__ = version_info_js
+
+__all__ = [
+ "backward_connection_args",
+ "Connection",
+ "ConnectionArguments",
+ "ConnectionConstructor",
+ "ConnectionCursor",
+ "ConnectionType",
+ "connection_args",
+ "connection_from_array",
+ "connection_from_array_slice",
+ "connection_definitions",
+ "cursor_for_object_in_connection",
+ "cursor_to_offset",
+ "Edge",
+ "EdgeConstructor",
+ "EdgeType",
+ "forward_connection_args",
+ "from_global_id",
+ "get_offset_with_default",
+ "global_id_field",
+ "GraphQLConnectionDefinitions",
+ "GraphQLNodeDefinitions",
+ "MutationFn",
+ "MutationFnWithoutArgs",
+ "mutation_with_client_mutation_id",
+ "node_definitions",
+ "NullResult",
+ "offset_to_cursor",
+ "PageInfo",
+ "PageInfoConstructor",
+ "PageInfoType",
+ "page_info_type",
+ "plural_identifying_root_field",
+ "ResolvedGlobalId",
+ "SizedSliceable",
+ "to_global_id",
+ "version",
+ "version_info",
+ "version_js",
+ "version_info_js",
+]
diff --git a/src/graphql_relay/connection/__init__.py b/src/graphql_relay/connection/__init__.py
new file mode 100644
index 0000000..7668785
--- /dev/null
+++ b/src/graphql_relay/connection/__init__.py
@@ -0,0 +1 @@
+"""graphql_relay.connection"""
diff --git a/src/graphql_relay/connection/array_connection.py b/src/graphql_relay/connection/array_connection.py
new file mode 100644
index 0000000..f25cdce
--- /dev/null
+++ b/src/graphql_relay/connection/array_connection.py
@@ -0,0 +1,215 @@
+from typing import Any, Iterator, Optional, Sequence
+
+try:
+ from typing import Protocol
+except ImportError: # Python < 3.8
+ from typing_extensions import Protocol # type: ignore
+
+from ..utils.base64 import base64, unbase64
+from .connection import (
+ Connection,
+ ConnectionArguments,
+ ConnectionConstructor,
+ ConnectionCursor,
+ ConnectionType,
+ Edge,
+ EdgeConstructor,
+ PageInfo,
+ PageInfoConstructor,
+)
+
+__all__ = [
+ "connection_from_array",
+ "connection_from_array_slice",
+ "cursor_for_object_in_connection",
+ "cursor_to_offset",
+ "get_offset_with_default",
+ "offset_to_cursor",
+ "SizedSliceable",
+]
+
+
+class SizedSliceable(Protocol):
+ def __getitem__(self, index: slice) -> Any:
+ ...
+
+ def __iter__(self) -> Iterator:
+ ...
+
+ def __len__(self) -> int:
+ ...
+
+
+def connection_from_array(
+ data: SizedSliceable,
+ args: Optional[ConnectionArguments] = None,
+ connection_type: ConnectionConstructor = Connection,
+ edge_type: EdgeConstructor = Edge,
+ page_info_type: PageInfoConstructor = PageInfo,
+) -> ConnectionType:
+ """Create a connection object from a sequence of objects.
+
+ Note that different from its JavaScript counterpart which expects an array,
+ this function accepts any kind of sliceable object with a length.
+
+ Given this `data` object representing the result set, and connection arguments,
+ this simple function returns a connection object for use in GraphQL. It uses
+ offsets as pagination, so pagination will only work if the data is static.
+
+ The result will use the default types provided in the `connectiontypes` module
+ if you don't pass custom types as arguments.
+ """
+ return connection_from_array_slice(
+ data,
+ args,
+ slice_start=0,
+ array_length=len(data),
+ connection_type=connection_type,
+ edge_type=edge_type,
+ page_info_type=page_info_type,
+ )
+
+
+def connection_from_array_slice(
+ array_slice: SizedSliceable,
+ args: Optional[ConnectionArguments] = None,
+ slice_start: int = 0,
+ array_length: Optional[int] = None,
+ array_slice_length: Optional[int] = None,
+ connection_type: ConnectionConstructor = Connection,
+ edge_type: EdgeConstructor = Edge,
+ page_info_type: PageInfoConstructor = PageInfo,
+) -> ConnectionType:
+ """Create a connection object from a slice of the result set.
+
+ Note that different from its JavaScript counterpart which expects an array,
+ this function accepts any kind of sliceable object. This object represents
+ a slice of the full result set. You need to pass the start position of the
+ slice as `slice start` and the length of the full result set as `array_length`.
+ If the `array_slice` does not have a length, you need to provide it separately
+ in `array_slice_length` as well.
+
+ This function is similar to `connection_from_array`, but is intended for use
+ cases where you know the cardinality of the connection, consider it too large
+ to materialize the entire result set, and instead wish to pass in only a slice
+ of the total result large enough to cover the range specified in `args`.
+
+ If you do not provide a `slice_start`, we assume that the slice starts at
+ the beginning of the result set, and if you do not provide an `array_length`,
+ we assume that the slice ends at the end of the result set.
+ """
+ args = args or {}
+ before = args.get("before")
+ after = args.get("after")
+ first = args.get("first")
+ last = args.get("last")
+ if array_slice_length is None:
+ array_slice_length = len(array_slice)
+ slice_end = slice_start + array_slice_length
+ if array_length is None:
+ array_length = slice_end
+
+ start_offset = max(slice_start, 0)
+ end_offset = min(slice_end, array_length)
+
+ after_offset = get_offset_with_default(after, -1)
+ if 0 <= after_offset < array_length:
+ start_offset = max(start_offset, after_offset + 1)
+
+ before_offset = get_offset_with_default(before, end_offset)
+ if 0 <= before_offset < array_length:
+ end_offset = min(end_offset, before_offset)
+
+ if isinstance(first, int):
+ if first < 0:
+ raise ValueError("Argument 'first' must be a non-negative integer.")
+
+ end_offset = min(end_offset, start_offset + first)
+ if isinstance(last, int):
+ if last < 0:
+ raise ValueError("Argument 'last' must be a non-negative integer.")
+
+ start_offset = max(start_offset, end_offset - last)
+
+ # If supplied slice is too large, trim it down before mapping over it.
+ trimmed_slice = array_slice[start_offset - slice_start : end_offset - slice_start]
+
+ edges = [
+ edge_type(node=value, cursor=offset_to_cursor(start_offset + index))
+ for index, value in enumerate(trimmed_slice)
+ ]
+
+ first_edge_cursor = edges[0].cursor if edges else None
+ last_edge_cursor = edges[-1].cursor if edges else None
+ lower_bound = after_offset + 1 if after else 0
+ upper_bound = before_offset if before else array_length
+
+ return connection_type(
+ edges=edges,
+ pageInfo=page_info_type(
+ startCursor=first_edge_cursor,
+ endCursor=last_edge_cursor,
+ hasPreviousPage=isinstance(last, int) and start_offset > lower_bound,
+ hasNextPage=isinstance(first, int) and end_offset < upper_bound,
+ ),
+ )
+
+
+PREFIX = "arrayconnection:"
+
+
+def offset_to_cursor(offset: int) -> ConnectionCursor:
+ """Create the cursor string from an offset."""
+ return base64(f"{PREFIX}{offset}")
+
+
+def cursor_to_offset(cursor: ConnectionCursor) -> Optional[int]:
+ """Extract the offset from the cursor string."""
+ try:
+ return int(unbase64(cursor)[len(PREFIX) :])
+ except ValueError:
+ return None
+
+
+def cursor_for_object_in_connection(
+ data: Sequence, obj: Any
+) -> Optional[ConnectionCursor]:
+ """Return the cursor associated with an object in a sequence.
+
+ This function uses the `index` method of the sequence if it exists,
+ otherwise searches the object by iterating via the `__getitem__` method.
+ """
+ try:
+ offset = data.index(obj)
+ except AttributeError:
+ # data does not have an index method
+ offset = 0
+ try:
+ while True:
+ if data[offset] == obj:
+ break
+ offset += 1
+ except IndexError:
+ return None
+ else:
+ return offset_to_cursor(offset)
+ except ValueError:
+ return None
+ else:
+ return offset_to_cursor(offset)
+
+
+def get_offset_with_default(
+ cursor: Optional[ConnectionCursor] = None, default_offset: int = 0
+) -> int:
+ """Get offset from a given cursor and a default.
+
+ Given an optional cursor and a default offset, return the offset to use;
+ if the cursor contains a valid offset, that will be used,
+ otherwise it will be the default.
+ """
+ if not isinstance(cursor, str):
+ return default_offset
+
+ offset = cursor_to_offset(cursor)
+ return default_offset if offset is None else offset
diff --git a/src/graphql_relay/connection/arrayconnection.py b/src/graphql_relay/connection/arrayconnection.py
new file mode 100644
index 0000000..efae32e
--- /dev/null
+++ b/src/graphql_relay/connection/arrayconnection.py
@@ -0,0 +1,29 @@
+import warnings
+
+# noinspection PyDeprecation
+from .array_connection import (
+ connection_from_array,
+ connection_from_array_slice,
+ cursor_for_object_in_connection,
+ cursor_to_offset,
+ get_offset_with_default,
+ offset_to_cursor,
+ SizedSliceable,
+)
+
+warnings.warn(
+ "The 'arrayconnection' module is deprecated. "
+ "Functions should be imported from the top-level package instead.",
+ DeprecationWarning,
+ stacklevel=2,
+)
+
+__all__ = [
+ "connection_from_array",
+ "connection_from_array_slice",
+ "cursor_for_object_in_connection",
+ "cursor_to_offset",
+ "get_offset_with_default",
+ "offset_to_cursor",
+ "SizedSliceable",
+]
diff --git a/src/graphql_relay/connection/connection.py b/src/graphql_relay/connection/connection.py
new file mode 100644
index 0000000..2058baa
--- /dev/null
+++ b/src/graphql_relay/connection/connection.py
@@ -0,0 +1,257 @@
+from typing import Any, Dict, List, NamedTuple, Optional, Union
+
+from graphql import (
+ get_named_type,
+ resolve_thunk,
+ GraphQLArgument,
+ GraphQLArgumentMap,
+ GraphQLBoolean,
+ GraphQLField,
+ GraphQLFieldResolver,
+ GraphQLInt,
+ GraphQLList,
+ GraphQLNonNull,
+ GraphQLObjectType,
+ GraphQLString,
+ ThunkMapping,
+)
+
+from graphql import GraphQLNamedOutputType
+
+try:
+ from typing import Protocol
+except ImportError: # Python < 3.8
+ from typing_extensions import Protocol # type: ignore
+
+__all__ = [
+ "backward_connection_args",
+ "connection_args",
+ "connection_definitions",
+ "forward_connection_args",
+ "page_info_type",
+ "Connection",
+ "ConnectionArguments",
+ "ConnectionConstructor",
+ "ConnectionCursor",
+ "ConnectionType",
+ "Edge",
+ "EdgeConstructor",
+ "EdgeType",
+ "GraphQLConnectionDefinitions",
+ "PageInfo",
+ "PageInfoConstructor",
+ "PageInfoType",
+]
+
+
+# Returns a GraphQLArgumentMap appropriate to include on a field
+# whose return type is a connection type with forward pagination.
+forward_connection_args: GraphQLArgumentMap = {
+ "after": GraphQLArgument(
+ GraphQLString,
+ description="Returns the items in the list"
+ " that come after the specified cursor.",
+ ),
+ "first": GraphQLArgument(
+ GraphQLInt,
+ description="Returns the first n items from the list.",
+ ),
+}
+
+# Returns a GraphQLArgumentMap appropriate to include on a field
+# whose return type is a connection type with backward pagination.
+backward_connection_args: GraphQLArgumentMap = {
+ "before": GraphQLArgument(
+ GraphQLString,
+ description="Returns the items in the list"
+ " that come before the specified cursor.",
+ ),
+ "last": GraphQLArgument(
+ GraphQLInt, description="Returns the last n items from the list."
+ ),
+}
+
+# Returns a GraphQLArgumentMap appropriate to include on a field
+# whose return type is a connection type with bidirectional pagination.
+connection_args = {**forward_connection_args, **backward_connection_args}
+
+
+class GraphQLConnectionDefinitions(NamedTuple):
+ edge_type: GraphQLObjectType
+ connection_type: GraphQLObjectType
+
+
+"""A type alias for cursors in this implementation."""
+ConnectionCursor = str
+
+
+"""A type describing the arguments a connection field receives in GraphQL.
+
+The following kinds of arguments are expected (all optional):
+
+ before: ConnectionCursor
+ after: ConnectionCursor
+ first: int
+ last: int
+"""
+ConnectionArguments = Dict[str, Any]
+
+
+def connection_definitions(
+ node_type: Union[GraphQLNamedOutputType, GraphQLNonNull[GraphQLNamedOutputType]],
+ name: Optional[str] = None,
+ resolve_node: Optional[GraphQLFieldResolver] = None,
+ resolve_cursor: Optional[GraphQLFieldResolver] = None,
+ edge_fields: Optional[ThunkMapping[GraphQLField]] = None,
+ connection_fields: Optional[ThunkMapping[GraphQLField]] = None,
+) -> GraphQLConnectionDefinitions:
+ """Return GraphQLObjectTypes for a connection with the given name.
+
+ The nodes of the returned object types will be of the specified type.
+ """
+ name = name or get_named_type(node_type).name
+
+ edge_type = GraphQLObjectType(
+ name + "Edge",
+ description="An edge in a connection.",
+ fields=lambda: {
+ "node": GraphQLField(
+ node_type,
+ resolve=resolve_node,
+ description="The item at the end of the edge",
+ ),
+ "cursor": GraphQLField(
+ GraphQLNonNull(GraphQLString),
+ resolve=resolve_cursor,
+ description="A cursor for use in pagination",
+ ),
+ **resolve_thunk(edge_fields or {}),
+ },
+ )
+
+ connection_type = GraphQLObjectType(
+ name + "Connection",
+ description="A connection to a list of items.",
+ fields=lambda: {
+ "pageInfo": GraphQLField(
+ GraphQLNonNull(page_info_type),
+ description="Information to aid in pagination.",
+ ),
+ "edges": GraphQLField(
+ GraphQLList(edge_type), description="A list of edges."
+ ),
+ **resolve_thunk(connection_fields or {}),
+ },
+ )
+
+ return GraphQLConnectionDefinitions(edge_type, connection_type)
+
+
+class PageInfoType(Protocol):
+ @property
+ def startCursor(self) -> Optional[ConnectionCursor]:
+ ...
+
+ def endCursor(self) -> Optional[ConnectionCursor]:
+ ...
+
+ def hasPreviousPage(self) -> bool:
+ ...
+
+ def hasNextPage(self) -> bool:
+ ...
+
+
+class PageInfoConstructor(Protocol):
+ def __call__(
+ self,
+ *,
+ startCursor: Optional[ConnectionCursor],
+ endCursor: Optional[ConnectionCursor],
+ hasPreviousPage: bool,
+ hasNextPage: bool,
+ ) -> PageInfoType:
+ ...
+
+
+class PageInfo(NamedTuple):
+ """A type designed to be exposed as `PageInfo` over GraphQL."""
+
+ startCursor: Optional[ConnectionCursor]
+ endCursor: Optional[ConnectionCursor]
+ hasPreviousPage: bool
+ hasNextPage: bool
+
+
+class EdgeType(Protocol):
+ @property
+ def node(self) -> Any:
+ ...
+
+ @property
+ def cursor(self) -> ConnectionCursor:
+ ...
+
+
+class EdgeConstructor(Protocol):
+ def __call__(self, *, node: Any, cursor: ConnectionCursor) -> EdgeType:
+ ...
+
+
+class Edge(NamedTuple):
+ """A type designed to be exposed as a `Edge` over GraphQL."""
+
+ node: Any
+ cursor: ConnectionCursor
+
+
+class ConnectionType(Protocol):
+ @property
+ def edges(self) -> List[EdgeType]:
+ ...
+
+ @property
+ def pageInfo(self) -> PageInfoType:
+ ...
+
+
+class ConnectionConstructor(Protocol):
+ def __call__(
+ self,
+ *,
+ edges: List[EdgeType],
+ pageInfo: PageInfoType,
+ ) -> ConnectionType:
+ ...
+
+
+class Connection(NamedTuple):
+ """A type designed to be exposed as a `Connection` over GraphQL."""
+
+ edges: List[Edge]
+ pageInfo: PageInfo
+
+
+# The common page info type used by all connections.
+page_info_type = GraphQLObjectType(
+ "PageInfo",
+ description="Information about pagination in a connection.",
+ fields=lambda: {
+ "hasNextPage": GraphQLField(
+ GraphQLNonNull(GraphQLBoolean),
+ description="When paginating forwards, are there more items?",
+ ),
+ "hasPreviousPage": GraphQLField(
+ GraphQLNonNull(GraphQLBoolean),
+ description="When paginating backwards, are there more items?",
+ ),
+ "startCursor": GraphQLField(
+ GraphQLString,
+ description="When paginating backwards, the cursor to continue.",
+ ),
+ "endCursor": GraphQLField(
+ GraphQLString,
+ description="When paginating forwards, the cursor to continue.",
+ ),
+ },
+)
diff --git a/src/graphql_relay/mutation/__init__.py b/src/graphql_relay/mutation/__init__.py
new file mode 100644
index 0000000..b198fb1
--- /dev/null
+++ b/src/graphql_relay/mutation/__init__.py
@@ -0,0 +1 @@
+"""graphql_relay.mutation"""
diff --git a/src/graphql_relay/mutation/mutation.py b/src/graphql_relay/mutation/mutation.py
new file mode 100644
index 0000000..f927bec
--- /dev/null
+++ b/src/graphql_relay/mutation/mutation.py
@@ -0,0 +1,119 @@
+from collections.abc import Mapping
+from inspect import iscoroutinefunction
+from typing import Any, Callable, Dict, Optional
+
+from graphql import (
+ resolve_thunk,
+ GraphQLArgument,
+ GraphQLField,
+ GraphQLFieldMap,
+ GraphQLInputField,
+ GraphQLInputFieldMap,
+ GraphQLInputObjectType,
+ GraphQLNonNull,
+ GraphQLObjectType,
+ GraphQLResolveInfo,
+ GraphQLString,
+ ThunkMapping,
+)
+from graphql.pyutils import AwaitableOrValue
+
+__all__ = [
+ "mutation_with_client_mutation_id",
+ "MutationFn",
+ "MutationFnWithoutArgs",
+ "NullResult",
+]
+
+# Note: Contrary to the Javascript implementation of MutationFn,
+# the context is passed as part of the GraphQLResolveInfo and any arguments
+# are passed individually as keyword arguments.
+MutationFnWithoutArgs = Callable[[GraphQLResolveInfo], AwaitableOrValue[Any]]
+# Unfortunately there is currently no syntax to indicate optional or keyword
+# arguments in Python, so we also allow any other Callable as a workaround:
+MutationFn = Callable[..., AwaitableOrValue[Any]]
+
+
+class NullResult:
+ def __init__(self, clientMutationId: Optional[str] = None) -> None:
+ self.clientMutationId = clientMutationId
+
+
+def mutation_with_client_mutation_id(
+ name: str,
+ input_fields: ThunkMapping[GraphQLInputField],
+ output_fields: ThunkMapping[GraphQLField],
+ mutate_and_get_payload: MutationFn,
+ description: Optional[str] = None,
+ deprecation_reason: Optional[str] = None,
+ extensions: Optional[Dict[str, Any]] = None,
+) -> GraphQLField:
+ """
+ Returns a GraphQLFieldConfig for the specified mutation.
+
+ The input_fields and output_fields should not include `clientMutationId`,
+ as this will be provided automatically.
+
+ An input object will be created containing the input fields, and an
+ object will be created containing the output fields.
+
+ mutate_and_get_payload will receive a GraphQLResolveInfo as first argument,
+ and the input fields as keyword arguments, and it should return an object
+ (or a dict) with an attribute (or a key) for each output field.
+ It may return synchronously or asynchronously.
+ """
+
+ def augmented_input_fields() -> GraphQLInputFieldMap:
+ return dict(
+ resolve_thunk(input_fields),
+ clientMutationId=GraphQLInputField(GraphQLString),
+ )
+
+ def augmented_output_fields() -> GraphQLFieldMap:
+ return dict(
+ resolve_thunk(output_fields),
+ clientMutationId=GraphQLField(GraphQLString),
+ )
+
+ output_type = GraphQLObjectType(name + "Payload", fields=augmented_output_fields)
+
+ input_type = GraphQLInputObjectType(name + "Input", fields=augmented_input_fields)
+
+ if iscoroutinefunction(mutate_and_get_payload):
+
+ # noinspection PyShadowingBuiltins
+ async def resolve(_root: Any, info: GraphQLResolveInfo, input: Dict) -> Any:
+ payload = await mutate_and_get_payload(info, **input)
+ clientMutationId = input.get("clientMutationId")
+ if payload is None:
+ return NullResult(clientMutationId)
+ if isinstance(payload, Mapping):
+ payload["clientMutationId"] = clientMutationId # type: ignore
+ else:
+ payload.clientMutationId = clientMutationId
+ return payload
+
+ else:
+
+ # noinspection PyShadowingBuiltins
+ def resolve( # type: ignore
+ _root: Any, info: GraphQLResolveInfo, input: Dict
+ ) -> Any:
+ payload = mutate_and_get_payload(info, **input)
+ clientMutationId = input.get("clientMutationId")
+ if payload is None:
+ return NullResult(clientMutationId)
+ if isinstance(payload, Mapping):
+ payload["clientMutationId"] = clientMutationId # type: ignore
+ else:
+ payload.clientMutationId = clientMutationId # type: ignore
+ return payload
+
+ return GraphQLField(
+ output_type,
+ description=description,
+ deprecation_reason=deprecation_reason,
+ args={"input": GraphQLArgument(GraphQLNonNull(input_type))},
+ resolve=resolve,
+ extensions=extensions,
+ )
diff --git a/src/graphql_relay/node/__init__.py b/src/graphql_relay/node/__init__.py
new file mode 100644
index 0000000..21b42c6
--- /dev/null
+++ b/src/graphql_relay/node/__init__.py
@@ -0,0 +1 @@
+"""graphql_relay.node"""
diff --git a/src/graphql_relay/node/node.py b/src/graphql_relay/node/node.py
new file mode 100644
index 0000000..ad062a5
--- /dev/null
+++ b/src/graphql_relay/node/node.py
@@ -0,0 +1,132 @@
+from typing import Any, Callable, NamedTuple, Optional, Union
+
+from graphql_relay.utils.base64 import base64, unbase64
+
+from graphql import (
+ GraphQLArgument,
+ GraphQLNonNull,
+ GraphQLID,
+ GraphQLField,
+ GraphQLInterfaceType,
+ GraphQLList,
+ GraphQLResolveInfo,
+ GraphQLTypeResolver,
+)
+
+__all__ = [
+ "from_global_id",
+ "global_id_field",
+ "node_definitions",
+ "to_global_id",
+ "GraphQLNodeDefinitions",
+ "ResolvedGlobalId",
+]
+
+
+class GraphQLNodeDefinitions(NamedTuple):
+
+ node_interface: GraphQLInterfaceType
+ node_field: GraphQLField
+ nodes_field: GraphQLField
+
+
+def node_definitions(
+ fetch_by_id: Callable[[str, GraphQLResolveInfo], Any],
+ type_resolver: Optional[GraphQLTypeResolver] = None,
+) -> GraphQLNodeDefinitions:
+ """
+ Given a function to map from an ID to an underlying object, and a function
+ to map from an underlying object to the concrete GraphQLObjectType it
+ corresponds to, constructs a `Node` interface that objects can implement,
+ and a field object to be used as a `node` root field.
+
+ If the type_resolver is omitted, object resolution on the interface will be
+ handled with the `is_type_of` method on object types, as with any GraphQL
+ interface without a provided `resolve_type` method.
+ """
+ node_interface = GraphQLInterfaceType(
+ "Node",
+ description="An object with an ID",
+ fields=lambda: {
+ "id": GraphQLField(
+ GraphQLNonNull(GraphQLID), description="The id of the object."
+ )
+ },
+ resolve_type=type_resolver,
+ )
+
+ # noinspection PyShadowingBuiltins
+ node_field = GraphQLField(
+ node_interface,
+ description="Fetches an object given its ID",
+ args={
+ "id": GraphQLArgument(
+ GraphQLNonNull(GraphQLID), description="The ID of an object"
+ )
+ },
+ resolve=lambda _obj, info, id: fetch_by_id(id, info),
+ )
+
+ nodes_field = GraphQLField(
+ GraphQLNonNull(GraphQLList(node_interface)),
+ description="Fetches objects given their IDs",
+ args={
+ "ids": GraphQLArgument(
+ GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLID))),
+ description="The IDs of objects",
+ )
+ },
+ resolve=lambda _obj, info, ids: [fetch_by_id(id_, info) for id_ in ids],
+ )
+
+ return GraphQLNodeDefinitions(node_interface, node_field, nodes_field)
+
+
+class ResolvedGlobalId(NamedTuple):
+
+ type: str
+ id: str
+
+
+def to_global_id(type_: str, id_: Union[str, int]) -> str:
+ """
+ Takes a type name and an ID specific to that type name, and returns a
+ "global ID" that is unique among all types.
+ """
+ return base64(f"{type_}:{GraphQLID.serialize(id_)}")
+
+
+def from_global_id(global_id: str) -> ResolvedGlobalId:
+ """
+ Takes the "global ID" created by to_global_id, and returns the type name and ID
+ used to create it.
+ """
+ global_id = unbase64(global_id)
+ if ":" not in global_id:
+ return ResolvedGlobalId("", global_id)
+ return ResolvedGlobalId(*global_id.split(":", 1))
+
+
+def global_id_field(
+ type_name: Optional[str] = None,
+ id_fetcher: Optional[Callable[[Any, GraphQLResolveInfo], str]] = None,
+) -> GraphQLField:
+ """
+ Creates the configuration for an id field on a node, using `to_global_id` to
+ construct the ID from the provided typename. The type-specific ID is fetched
+ by calling id_fetcher on the object, or if not provided, by accessing the `id`
+    attribute of the object, or the `id` key if the object is a dict.
+ """
+
+ def resolve(obj: Any, info: GraphQLResolveInfo, **_args: Any) -> str:
+ type_ = type_name or info.parent_type.name
+ id_ = (
+ id_fetcher(obj, info)
+ if id_fetcher
+ else (obj["id"] if isinstance(obj, dict) else obj.id)
+ )
+ return to_global_id(type_, id_)
+
+ return GraphQLField(
+ GraphQLNonNull(GraphQLID), description="The ID of an object", resolve=resolve
+ )
diff --git a/src/graphql_relay/node/plural.py b/src/graphql_relay/node/plural.py
new file mode 100644
index 0000000..870c37f
--- /dev/null
+++ b/src/graphql_relay/node/plural.py
@@ -0,0 +1,41 @@
+from typing import Any, Callable, List, Optional
+
+from graphql import (
+ GraphQLArgument,
+ GraphQLField,
+ GraphQLInputType,
+ GraphQLOutputType,
+ GraphQLList,
+ GraphQLNonNull,
+ GraphQLResolveInfo,
+ get_nullable_type,
+)
+
+__all__ = ["plural_identifying_root_field"]
+
+
+def plural_identifying_root_field(
+ arg_name: str,
+ input_type: GraphQLInputType,
+ output_type: GraphQLOutputType,
+ resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
+ description: Optional[str] = None,
+) -> GraphQLField:
+ def resolve(_obj: Any, info: GraphQLResolveInfo, **args: Any) -> List:
+ inputs = args[arg_name]
+ return [resolve_single_input(info, input_) for input_ in inputs]
+
+ return GraphQLField(
+ GraphQLList(output_type),
+ description=description,
+ args={
+ arg_name: GraphQLArgument(
+ GraphQLNonNull(
+ GraphQLList(
+ GraphQLNonNull(get_nullable_type(input_type)) # type: ignore
+ )
+ )
+ )
+ },
+ resolve=resolve,
+ )
diff --git a/src/graphql_relay/py.typed b/src/graphql_relay/py.typed
new file mode 100644
index 0000000..eb0b539
--- /dev/null
+++ b/src/graphql_relay/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561. The graphql_relay package uses inline types.
diff --git a/src/graphql_relay/utils/__init__.py b/src/graphql_relay/utils/__init__.py
new file mode 100644
index 0000000..2490b59
--- /dev/null
+++ b/src/graphql_relay/utils/__init__.py
@@ -0,0 +1,5 @@
+"""graphql_relay.utils"""
+
+from .base64 import base64, unbase64
+
+__all__ = ["base64", "unbase64"]
diff --git a/src/graphql_relay/utils/base64.py b/src/graphql_relay/utils/base64.py
new file mode 100644
index 0000000..a3ee9a6
--- /dev/null
+++ b/src/graphql_relay/utils/base64.py
@@ -0,0 +1,24 @@
+from base64 import b64encode, b64decode
+import binascii
+
+__all__ = ["base64", "unbase64"]
+
+Base64String = str
+
+
+def base64(s: str) -> Base64String:
+ """Encode the string s using Base64."""
+ b: bytes = s.encode("utf-8") if isinstance(s, str) else s
+ return b64encode(b).decode("ascii")
+
+
+def unbase64(s: Base64String) -> str:
+ """Decode the string s using Base64."""
+ try:
+ b: bytes = s.encode("ascii") if isinstance(s, str) else s
+ except UnicodeEncodeError:
+ return ""
+ try:
+ return b64decode(b).decode("utf-8")
+ except (binascii.Error, UnicodeDecodeError):
+ return ""
diff --git a/src/graphql_relay/version.py b/src/graphql_relay/version.py
new file mode 100644
index 0000000..1d53baa
--- /dev/null
+++ b/src/graphql_relay/version.py
@@ -0,0 +1,51 @@
+import re
+from typing import NamedTuple
+
+__all__ = ["version", "version_info", "version_js", "version_info_js"]
+
+version = "3.2.0"
+
+version_js = "0.10.0"
+
+
+_re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(\D*)(\d*)")
+
+
+class VersionInfo(NamedTuple):
+ major: int
+ minor: int
+ micro: int
+ releaselevel: str
+ serial: int
+
+ @classmethod
+ def from_str(cls, v: str) -> "VersionInfo":
+ groups = _re_version.match(v).groups() # type: ignore
+ major, minor, micro = map(int, groups[:3])
+ level = (groups[3] or "")[:1]
+ if level == "a":
+ level = "alpha"
+ elif level == "b":
+ level = "beta"
+ elif level in ("c", "r"):
+ level = "candidate"
+ else:
+ level = "final"
+ serial = groups[4]
+ serial = int(serial) if serial else 0
+ return cls(major, minor, micro, level, serial)
+
+ def __str__(self) -> str:
+ v = f"{self.major}.{self.minor}.{self.micro}"
+ level = self.releaselevel
+ if level and level != "final":
+ level = level[:1]
+ if level == "c":
+ level = "rc"
+ v = f"{v}{level}{self.serial}"
+ return v
+
+
+version_info = VersionInfo.from_str(version)
+
+version_info_js = VersionInfo.from_str(version_js)
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..537c3ef
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+"""Tests for graphql_relay"""
diff --git a/tests/connection/__init__.py b/tests/connection/__init__.py
new file mode 100644
index 0000000..2bd2ffa
--- /dev/null
+++ b/tests/connection/__init__.py
@@ -0,0 +1 @@
+"""Tests for graphql_relay.connection"""
diff --git a/tests/connection/test_array_connection.py b/tests/connection/test_array_connection.py
new file mode 100644
index 0000000..33c89ee
--- /dev/null
+++ b/tests/connection/test_array_connection.py
@@ -0,0 +1,771 @@
+from typing import cast, Sequence
+
+from pytest import deprecated_call, raises
+
+from graphql_relay import (
+ connection_from_array,
+ connection_from_array_slice,
+ cursor_for_object_in_connection,
+ offset_to_cursor,
+ Connection,
+ Edge,
+ PageInfo,
+)
+
+array_abcde = ["A", "B", "C", "D", "E"]
+
+cursor_a = "YXJyYXljb25uZWN0aW9uOjA="
+cursor_b = "YXJyYXljb25uZWN0aW9uOjE="
+cursor_c = "YXJyYXljb25uZWN0aW9uOjI="
+cursor_d = "YXJyYXljb25uZWN0aW9uOjM="
+cursor_e = "YXJyYXljb25uZWN0aW9uOjQ="
+
+edge_a = Edge(node="A", cursor=cursor_a)
+edge_b = Edge(node="B", cursor=cursor_b)
+edge_c = Edge(node="C", cursor=cursor_c)
+edge_d = Edge(node="D", cursor=cursor_d)
+edge_e = Edge(node="E", cursor=cursor_e)
+
+
+def describe_connection_from_array():
+ def warns_for_deprecated_import():
+ from importlib import reload
+
+ with deprecated_call():
+ from graphql_relay.connection import arrayconnection as deprecated
+
+ # noinspection PyDeprecation
+ reload(deprecated)
+ # noinspection PyDeprecation
+ assert deprecated.connection_from_array is connection_from_array
+
+ def describe_basic_slicing():
+ def returns_all_elements_without_filters():
+ c = connection_from_array(array_abcde, {})
+ assert c == Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_a_smaller_first():
+ c = connection_from_array(array_abcde, dict(first=2))
+ assert c == Connection(
+ edges=[
+ edge_a,
+ edge_b,
+ ],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_b,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def respects_an_overly_large_first():
+ c = connection_from_array(array_abcde, dict(first=10))
+ assert c == Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_a_smaller_last():
+ c = connection_from_array(array_abcde, dict(last=2))
+ assert c == Connection(
+ edges=[edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_d,
+ endCursor=cursor_e,
+ hasPreviousPage=True,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_an_overly_large_last():
+ c = connection_from_array(array_abcde, dict(last=10))
+ assert c == Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def describe_pagination():
+ def respects_first_and_after():
+ c = connection_from_array(array_abcde, dict(first=2, after=cursor_b))
+ assert c == Connection(
+ edges=[edge_c, edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_c,
+ endCursor=cursor_d,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def respects_first_and_after_with_long_first():
+ c = connection_from_array(array_abcde, dict(first=10, after=cursor_b))
+ assert c == Connection(
+ edges=[edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_c,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_last_and_before():
+ c = connection_from_array(array_abcde, dict(last=2, before=cursor_d))
+ assert c == Connection(
+ edges=[edge_b, edge_c],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_c,
+ hasPreviousPage=True,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_last_and_before_with_long_last():
+ c = connection_from_array(array_abcde, dict(last=10, before=cursor_d))
+ assert c == Connection(
+ edges=[edge_a, edge_b, edge_c],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_c,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_first_and_after_and_before_too_few():
+ c = connection_from_array(
+ array_abcde,
+ dict(first=2, after=cursor_a, before=cursor_e),
+ )
+ assert c == Connection(
+ edges=[edge_b, edge_c],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_c,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def respects_first_and_after_and_before_too_many():
+ c = connection_from_array(
+ array_abcde,
+ dict(first=4, after=cursor_a, before=cursor_e),
+ )
+ assert c == Connection(
+ edges=[edge_b, edge_c, edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_d,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_first_and_after_and_before_exactly_right():
+ c = connection_from_array(
+ array_abcde,
+ dict(first=3, after=cursor_a, before=cursor_e),
+ )
+ assert c == Connection(
+ edges=[edge_b, edge_c, edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_d,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_last_and_after_and_before_too_few():
+ c = connection_from_array(
+ array_abcde,
+ dict(last=2, after=cursor_a, before=cursor_e),
+ )
+ assert c == Connection(
+ edges=[edge_c, edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_c,
+ endCursor=cursor_d,
+ hasPreviousPage=True,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_last_and_after_and_before_too_many():
+ c = connection_from_array(
+ array_abcde,
+ dict(last=4, after=cursor_a, before=cursor_e),
+ )
+ assert c == Connection(
+ edges=[edge_b, edge_c, edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_d,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def respects_last_and_after_and_before_exactly_right():
+ c = connection_from_array(
+ array_abcde,
+ dict(last=3, after=cursor_a, before=cursor_e),
+ )
+ assert c == Connection(
+ edges=[edge_b, edge_c, edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_d,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def describe_cursor_edge_cases():
+ def throws_an_error_if_first_smaller_than_zero():
+ with raises(ValueError) as exc_info:
+ connection_from_array(array_abcde, dict(first=-1))
+ assert str(exc_info.value) == (
+ "Argument 'first' must be a non-negative integer."
+ )
+
+ def throws_an_error_if_last_smaller_than_zero():
+ with raises(ValueError) as exc_info:
+ connection_from_array(array_abcde, dict(last=-1))
+ assert str(exc_info.value) == (
+ "Argument 'last' must be a non-negative integer."
+ )
+
+ def returns_all_elements_if_cursors_are_invalid():
+ c1 = connection_from_array(
+ array_abcde, dict(before="InvalidBase64", after="InvalidBase64")
+ )
+
+ invalid_unicode_in_base64 = "9JCAgA==" # U+110000
+ c2 = connection_from_array(
+ array_abcde,
+ dict(before=invalid_unicode_in_base64, after=invalid_unicode_in_base64),
+ )
+
+ assert c1 == c2
+ assert c1 == Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def returns_all_elements_if_cursors_are_on_the_outside():
+ all_edges = Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ assert (
+ connection_from_array(array_abcde, dict(before=offset_to_cursor(6)))
+ == all_edges
+ )
+ assert (
+ connection_from_array(array_abcde, dict(before=offset_to_cursor(-1)))
+ == all_edges
+ )
+ assert (
+ connection_from_array(array_abcde, dict(after=offset_to_cursor(6)))
+ == all_edges
+ )
+ assert (
+ connection_from_array(array_abcde, dict(after=offset_to_cursor(-1)))
+ == all_edges
+ )
+
+ def returns_no_elements_if_cursors_cross():
+ c = connection_from_array(
+ array_abcde,
+ dict(before=cursor_c, after=cursor_e),
+ )
+ assert c == Connection(
+ edges=[],
+ pageInfo=PageInfo(
+ startCursor=None,
+ endCursor=None,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def describe_cursor_for_object_in_connection():
+ def returns_an_edges_cursor_given_an_array_and_a_member_object():
+ letter_b_cursor = cursor_for_object_in_connection(array_abcde, "B")
+ assert letter_b_cursor == cursor_b
+
+ def returns_null_given_an_array_and_a_non_member_object():
+ letter_f_cursor = cursor_for_object_in_connection(array_abcde, "F")
+ assert letter_f_cursor is None
+
+ def describe_extended_functionality():
+ """Test functionality that is not part of graphql-relay-js."""
+
+ def returns_an_edges_cursor_given_an_array_without_index_method():
+ class LettersWithoutIndex:
+ __getitem__ = array_abcde.__getitem__
+
+ letters_without_index = cast(Sequence, LettersWithoutIndex())
+
+ with raises(AttributeError):
+ letters_without_index.index("B")
+
+ letter_b_cursor = cursor_for_object_in_connection(
+ letters_without_index, "B"
+ )
+ assert letter_b_cursor == cursor_b
+
+ no_letter_cursor = cursor_for_object_in_connection(
+ letters_without_index, "="
+ )
+ assert no_letter_cursor is None
+
+ def describe_extended_functionality():
+ """Test functionality that is not part of graphql-relay-js."""
+
+ def does_not_require_args():
+ c = connection_from_array(array_abcde)
+ assert c == Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def uses_default_connection_types():
+ connection = connection_from_array(array_abcde[:1])
+ assert isinstance(connection, Connection)
+ edge = connection.edges[0]
+ assert isinstance(edge, Edge)
+ assert len(connection.edges) == 1
+ assert edge == edge_a
+ page_info = connection.pageInfo
+ assert isinstance(page_info, PageInfo)
+ assert page_info == PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ )
+
+ def accepts_custom_connection_type():
+ class CustomConnection:
+ # noinspection PyPep8Naming
+ def __init__(self, edges, pageInfo):
+ self.edges = edges
+ self.page_info = pageInfo
+
+ connection = connection_from_array(
+ array_abcde[:1], connection_type=CustomConnection
+ )
+ assert isinstance(connection, CustomConnection)
+ edge = connection.edges[0]
+ assert isinstance(edge, Edge)
+ assert len(connection.edges) == 1
+ assert edge == edge_a
+ page_info = connection.page_info
+ assert isinstance(page_info, PageInfo)
+ assert page_info == PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ )
+
+ def accepts_custom_edge_type():
+ class CustomEdge:
+ def __init__(self, node, cursor):
+ self.node = node
+ self.cursor = cursor
+
+ connection = connection_from_array(array_abcde[:1], edge_type=CustomEdge)
+ assert isinstance(connection, Connection)
+ assert isinstance(connection.edges, list)
+ assert len(connection.edges) == 1
+ edge = connection.edges[0]
+ assert isinstance(edge, CustomEdge)
+ assert edge.node == "A"
+ assert edge.cursor == cursor_a
+ page_info = connection.pageInfo
+ assert isinstance(page_info, PageInfo)
+ assert page_info == PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ )
+
+ def accepts_custom_page_info_type():
+ class CustomPageInfo:
+ # noinspection PyPep8Naming
+ def __init__(
+ self, startCursor, endCursor, hasPreviousPage, hasNextPage
+ ):
+ self.startCursor = startCursor
+ self.endCursor = endCursor
+ self.hasPreviousPage = hasPreviousPage
+ self.hasNextPage = hasNextPage
+
+ connection = connection_from_array(
+ array_abcde[:1], page_info_type=CustomPageInfo
+ )
+ assert isinstance(connection, Connection)
+ assert isinstance(connection.edges, list)
+ assert len(connection.edges) == 1
+ edge = connection.edges[0]
+ assert isinstance(edge, Edge)
+ assert edge == edge_a
+ page_info = connection.pageInfo
+ assert isinstance(page_info, CustomPageInfo)
+ assert page_info.startCursor == cursor_a
+ assert page_info.endCursor == cursor_a
+ assert page_info.hasPreviousPage is False
+ assert page_info.hasNextPage is False
+
+
+def describe_connection_from_array_slice():
+ def warns_for_deprecated_import():
+ from importlib import reload
+
+ with deprecated_call():
+ from graphql_relay.connection import arrayconnection as deprecated
+
+ # noinspection PyDeprecation
+ reload(deprecated)
+ # noinspection PyDeprecation
+ assert deprecated.connection_from_array_slice is connection_from_array_slice
+
+ def works_with_a_just_right_array_slice():
+ c = connection_from_array_slice(
+ array_abcde[1:3],
+ dict(first=2, after=cursor_a),
+ slice_start=1,
+ array_length=5,
+ )
+ assert c == Connection(
+ edges=[edge_b, edge_c],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_c,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def works_with_an_oversized_array_slice_left_side():
+ c = connection_from_array_slice(
+ array_abcde[0:3],
+ dict(first=2, after=cursor_a),
+ slice_start=0,
+ array_length=5,
+ )
+ assert c == Connection(
+ edges=[edge_b, edge_c],
+ pageInfo=PageInfo(
+ startCursor=cursor_b,
+ endCursor=cursor_c,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def works_with_an_oversized_array_slice_right_side():
+ c = connection_from_array_slice(
+ array_abcde[2:4],
+ dict(first=1, after=cursor_b),
+ slice_start=2,
+ array_length=5,
+ )
+ assert c == Connection(
+ edges=[edge_c],
+ pageInfo=PageInfo(
+ startCursor=cursor_c,
+ endCursor=cursor_c,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def works_with_an_oversized_array_slice_both_sides():
+ c = connection_from_array_slice(
+ array_abcde[1:4],
+ dict(first=1, after=cursor_b),
+ slice_start=1,
+ array_length=5,
+ )
+ assert c == Connection(
+ edges=[edge_c],
+ pageInfo=PageInfo(
+ startCursor=cursor_c,
+ endCursor=cursor_c,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def works_with_an_undersized_array_slice_left_side():
+ c = connection_from_array_slice(
+ array_abcde[3:5],
+ dict(first=3, after=cursor_b),
+ slice_start=3,
+ array_length=5,
+ )
+ assert c == Connection(
+ edges=[edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_d,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def works_with_an_undersized_array_slice_right_side():
+ c = connection_from_array_slice(
+ array_abcde[2:4],
+ dict(first=3, after=cursor_b),
+ slice_start=2,
+ array_length=5,
+ )
+ assert c == Connection(
+ edges=[edge_c, edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_c,
+ endCursor=cursor_d,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def works_with_an_undersized_array_slice_both_sides():
+ c = connection_from_array_slice(
+ array_abcde[3:4],
+ dict(first=3, after=cursor_b),
+ slice_start=3,
+ array_length=5,
+ )
+ assert c == Connection(
+ edges=[edge_d],
+ pageInfo=PageInfo(
+ startCursor=cursor_d,
+ endCursor=cursor_d,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def describe_extended_functionality():
+ """Test functionality that is not part of graphql-relay-js."""
+
+ def does_not_require_args():
+ c = connection_from_array_slice(array_abcde, slice_start=0, array_length=5)
+ assert c == Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def uses_zero_as_default_for_slice_start():
+ c = connection_from_array_slice(
+ array_abcde[:1], dict(first=1), array_length=5
+ )
+ assert c == Connection(
+ edges=[edge_a],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def uses_slice_end_as_default_for_array_length():
+ c = connection_from_array_slice(
+ array_abcde[:1], dict(first=1), slice_start=0
+ )
+ assert c == Connection(
+ edges=[edge_a],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def ignores_len_of_slice_if_array_slice_length_provided():
+ c = connection_from_array_slice(
+ array_abcde[:2], dict(first=2), array_length=2, array_slice_length=1
+ )
+ assert c == Connection(
+ edges=[edge_a],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=True,
+ ),
+ )
+
+ def uses_array_slice_length_instead_of_len_function():
+ class LettersWithoutLen:
+ __getitem__ = array_abcde.__getitem__
+
+ letters_without_len = cast(Sequence, LettersWithoutLen())
+
+ with raises(TypeError):
+ len(letters_without_len)
+
+ with raises(TypeError):
+ connection_from_array_slice(letters_without_len)
+
+ c = connection_from_array_slice(letters_without_len, array_slice_length=5)
+ assert c == Connection(
+ edges=[edge_a, edge_b, edge_c, edge_d, edge_e],
+ pageInfo=PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_e,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ ),
+ )
+
+ def uses_default_connection_types():
+ connection = connection_from_array_slice(
+ array_abcde[:1], slice_start=0, array_length=1
+ )
+ assert isinstance(connection, Connection)
+ edge = connection.edges[0]
+ assert isinstance(edge, Edge)
+ assert len(connection.edges) == 1
+ assert edge == edge_a
+ page_info = connection.pageInfo
+ assert isinstance(page_info, PageInfo)
+ assert page_info == PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ )
+
+ def accepts_custom_connection_type():
+ class CustomConnection:
+ # noinspection PyPep8Naming
+ def __init__(self, edges, pageInfo):
+ self.edges = edges
+ self.page_info = pageInfo
+
+ connection = connection_from_array_slice(
+ array_abcde[:1],
+ slice_start=0,
+ array_length=1,
+ connection_type=CustomConnection,
+ )
+ assert isinstance(connection, CustomConnection)
+ edge = connection.edges[0]
+ assert isinstance(edge, Edge)
+ assert len(connection.edges) == 1
+ assert edge == edge_a
+ page_info = connection.page_info
+ assert isinstance(page_info, PageInfo)
+ assert page_info == PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ )
+
+ def accepts_custom_edge_type():
+ class CustomEdge:
+ def __init__(self, node, cursor):
+ self.node = node
+ self.cursor = cursor
+
+ connection = connection_from_array_slice(
+ array_abcde[:1], slice_start=0, array_length=1, edge_type=CustomEdge
+ )
+ assert isinstance(connection, Connection)
+ assert isinstance(connection.edges, list)
+ assert len(connection.edges) == 1
+ edge = connection.edges[0]
+ assert isinstance(edge, CustomEdge)
+ assert edge.node == "A"
+ assert edge.cursor == cursor_a
+ page_info = connection.pageInfo
+ assert isinstance(page_info, PageInfo)
+ assert page_info == PageInfo(
+ startCursor=cursor_a,
+ endCursor=cursor_a,
+ hasPreviousPage=False,
+ hasNextPage=False,
+ )
+
+ def accepts_custom_page_info_type():
+ class CustomPageInfo:
+ # noinspection PyPep8Naming
+ def __init__(
+ self, startCursor, endCursor, hasPreviousPage, hasNextPage
+ ):
+ self.startCursor = startCursor
+ self.endCursor = endCursor
+ self.hasPreviousPage = hasPreviousPage
+ self.hasNextPage = hasNextPage
+
+ connection = connection_from_array_slice(
+ array_abcde[:1],
+ slice_start=0,
+ array_length=1,
+ page_info_type=CustomPageInfo,
+ )
+ assert isinstance(connection, Connection)
+ assert isinstance(connection.edges, list)
+ assert len(connection.edges) == 1
+ edge = connection.edges[0]
+ assert isinstance(edge, Edge)
+ assert edge == edge_a
+ page_info = connection.pageInfo
+ assert isinstance(page_info, CustomPageInfo)
+ assert page_info.startCursor == cursor_a
+ assert page_info.endCursor == cursor_a
+ assert page_info.hasPreviousPage is False
+ assert page_info.hasNextPage is False
diff --git a/tests/connection/test_connection.py b/tests/connection/test_connection.py
new file mode 100644
index 0000000..025cb2d
--- /dev/null
+++ b/tests/connection/test_connection.py
@@ -0,0 +1,274 @@
+from typing import List, NamedTuple
+
+from graphql import (
+ graphql_sync,
+ print_schema,
+ GraphQLField,
+ GraphQLInt,
+ GraphQLNonNull,
+ GraphQLObjectType,
+ GraphQLSchema,
+ GraphQLString,
+)
+
+from graphql_relay import (
+ backward_connection_args,
+ connection_args,
+ connection_definitions,
+ connection_from_array,
+ forward_connection_args,
+)
+
+from ..utils import dedent
+
+
+class User(NamedTuple):
+ name: str
+ friends: List[int]
+
+
+all_users = [
+ User(name="Dan", friends=[1, 2, 3, 4]),
+ User(name="Nick", friends=[0, 2, 3, 4]),
+ User(name="Lee", friends=[0, 1, 3, 4]),
+ User(name="Joe", friends=[0, 1, 2, 4]),
+ User(name="Tim", friends=[0, 1, 2, 3]),
+]
+
+friend_connection: GraphQLObjectType
+user_connection: GraphQLObjectType
+
+user_type = GraphQLObjectType(
+ "User",
+ fields=lambda: {
+ "name": GraphQLField(GraphQLString),
+ "friends": GraphQLField(
+ friend_connection,
+ args=connection_args,
+ resolve=lambda user, _info, **args: connection_from_array(
+ user.friends, args
+ ),
+ ),
+ "friendsForward": GraphQLField(
+ user_connection,
+ args=forward_connection_args,
+ resolve=lambda user, _info, **args: connection_from_array(
+ user.friends, args
+ ),
+ ),
+ "friendsBackward": GraphQLField(
+ user_connection,
+ args=backward_connection_args,
+ resolve=lambda user, _info, **args: connection_from_array(
+ user.friends, args
+ ),
+ ),
+ },
+)
+
+friend_connection = connection_definitions(
+ GraphQLNonNull(user_type),
+ name="Friend",
+ resolve_node=lambda edge, _info: all_users[edge.node],
+ edge_fields=lambda: {
+ "friendshipTime": GraphQLField(
+ GraphQLString, resolve=lambda user_, info_: "Yesterday"
+ )
+ },
+ connection_fields=lambda: {
+ "totalCount": GraphQLField(
+ GraphQLInt, resolve=lambda _user, _info: len(all_users) - 1
+ )
+ },
+).connection_type
+
+
+user_connection = connection_definitions(
+ GraphQLNonNull(user_type), resolve_node=lambda edge, _info: all_users[edge.node]
+).connection_type
+
+
+query_type = GraphQLObjectType(
+ "Query",
+ fields=lambda: {
+ "user": GraphQLField(user_type, resolve=lambda _root, _info: all_users[0])
+ },
+)
+
+schema = GraphQLSchema(query=query_type)
+
+
+def describe_connection_definition():
+ def includes_connection_and_edge_fields():
+ source = """
+ {
+ user {
+ friends(first: 2) {
+ totalCount
+ edges {
+ friendshipTime
+ node {
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {
+ "user": {
+ "friends": {
+ "totalCount": 4,
+ "edges": [
+ {"friendshipTime": "Yesterday", "node": {"name": "Nick"}},
+ {"friendshipTime": "Yesterday", "node": {"name": "Lee"}},
+ ],
+ }
+ }
+ },
+ None,
+ )
+
+ def works_with_forward_connection_args():
+ source = """
+ {
+ user {
+ friendsForward(first: 2) {
+ edges {
+ node {
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {
+ "user": {
+ "friendsForward": {
+ "edges": [{"node": {"name": "Nick"}}, {"node": {"name": "Lee"}}]
+ }
+ }
+ },
+ None,
+ )
+
+ def works_with_backward_connection_args():
+ source = """
+ {
+ user {
+ friendsBackward(last: 2) {
+ edges {
+ node {
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {
+ "user": {
+ "friendsBackward": {
+ "edges": [{"node": {"name": "Joe"}}, {"node": {"name": "Tim"}}]
+ }
+ }
+ },
+ None,
+ )
+
+ def generates_correct_types():
+ assert print_schema(schema) == dedent(
+ '''
+ type Query {
+ user: User
+ }
+
+ type User {
+ name: String
+ friends(
+ """Returns the items in the list that come after the specified cursor."""
+ after: String
+
+ """Returns the first n items from the list."""
+ first: Int
+
+ """Returns the items in the list that come before the specified cursor."""
+ before: String
+
+ """Returns the last n items from the list."""
+ last: Int
+ ): FriendConnection
+ friendsForward(
+ """Returns the items in the list that come after the specified cursor."""
+ after: String
+
+ """Returns the first n items from the list."""
+ first: Int
+ ): UserConnection
+ friendsBackward(
+ """Returns the items in the list that come before the specified cursor."""
+ before: String
+
+ """Returns the last n items from the list."""
+ last: Int
+ ): UserConnection
+ }
+
+ """A connection to a list of items."""
+ type FriendConnection {
+ """Information to aid in pagination."""
+ pageInfo: PageInfo!
+
+ """A list of edges."""
+ edges: [FriendEdge]
+ totalCount: Int
+ }
+
+ """Information about pagination in a connection."""
+ type PageInfo {
+ """When paginating forwards, are there more items?"""
+ hasNextPage: Boolean!
+
+ """When paginating backwards, are there more items?"""
+ hasPreviousPage: Boolean!
+
+ """When paginating backwards, the cursor to continue."""
+ startCursor: String
+
+ """When paginating forwards, the cursor to continue."""
+ endCursor: String
+ }
+
+ """An edge in a connection."""
+ type FriendEdge {
+ """The item at the end of the edge"""
+ node: User!
+
+ """A cursor for use in pagination"""
+ cursor: String!
+ friendshipTime: String
+ }
+
+ """A connection to a list of items."""
+ type UserConnection {
+ """Information to aid in pagination."""
+ pageInfo: PageInfo!
+
+ """A list of edges."""
+ edges: [UserEdge]
+ }
+
+ """An edge in a connection."""
+ type UserEdge {
+ """The item at the end of the edge"""
+ node: User!
+
+ """A cursor for use in pagination"""
+ cursor: String!
+ }
+ ''' # noqa: E501
+ )
diff --git a/tests/mutation/__init__.py b/tests/mutation/__init__.py
new file mode 100644
index 0000000..61f885c
--- /dev/null
+++ b/tests/mutation/__init__.py
@@ -0,0 +1 @@
+"""Tests for graphql_relay.mutation"""
diff --git a/tests/mutation/test_mutation.py b/tests/mutation/test_mutation.py
new file mode 100644
index 0000000..41792cb
--- /dev/null
+++ b/tests/mutation/test_mutation.py
@@ -0,0 +1,321 @@
+from pytest import mark
+
+from graphql import (
+ graphql,
+ graphql_sync,
+ print_schema,
+ print_type,
+ GraphQLField,
+ GraphQLFieldMap,
+ GraphQLInputField,
+ GraphQLInt,
+ GraphQLObjectType,
+ GraphQLSchema,
+)
+
+from graphql_relay import mutation_with_client_mutation_id
+
+from ..utils import dedent
+
+
+class Result:
+
+ # noinspection PyPep8Naming
+ def __init__(self, result, clientMutationId=None):
+ self.clientMutationId = clientMutationId
+ self.result = result
+
+
+def dummy_resolve(_info, inputData=None, clientMutationId=None):
+ return Result(inputData or 1, clientMutationId)
+
+
+async def dummy_resolve_async(_info, inputData=None, clientMutationId=None):
+ return Result(inputData or 1, clientMutationId)
+
+
+def wrap_in_schema(mutation_fields: GraphQLFieldMap) -> GraphQLSchema:
+ query_type = GraphQLObjectType("Query", fields={"dummy": GraphQLField(GraphQLInt)})
+ mutation_type = GraphQLObjectType("Mutation", fields=mutation_fields)
+ return GraphQLSchema(query_type, mutation_type)
+
+
+def describe_mutation_with_client_mutation_id():
+ def requires_an_argument():
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation", {}, {"result": GraphQLField(GraphQLInt)}, dummy_resolve
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation {
+ result
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ None,
+ [
+ {
+ "message": "Field 'someMutation' argument 'input'"
+ " of type 'SomeMutationInput!' is required,"
+ " but it was not provided.",
+ "locations": [(3, 15)],
+ }
+ ],
+ )
+
+ def returns_the_same_client_mutation_id():
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation", {}, {"result": GraphQLField(GraphQLInt)}, dummy_resolve
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"someMutation": {"result": 1, "clientMutationId": "abc"}},
+ None,
+ )
+
+ def supports_thunks_as_input_and_output_fields():
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation",
+ {"inputData": GraphQLInputField(GraphQLInt)},
+ {"result": GraphQLField(GraphQLInt)},
+ dummy_resolve,
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {inputData: 1234, clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {
+ "someMutation": {
+ "result": 1234,
+ "clientMutationId": "abc",
+ }
+ },
+ None,
+ )
+
+ @mark.asyncio
+ async def supports_async_mutations():
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation",
+ {},
+ {"result": GraphQLField(GraphQLInt)},
+ dummy_resolve_async,
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert await graphql(schema, source) == (
+ {"someMutation": {"result": 1, "clientMutationId": "abc"}},
+ None,
+ )
+
+ def can_access_root_value():
+ some_mutation = mutation_with_client_mutation_id( # pragma: no cover
+ "SomeMutation",
+ {},
+ {"result": GraphQLField(GraphQLInt)},
+ lambda info, clientMutationId=None: Result(
+ info.root_value, clientMutationId
+ ),
+ )
+
+ wrapper_type = GraphQLObjectType("WrapperType", {"someMutation": some_mutation})
+ assert print_type(wrapper_type) == dedent(
+ """
+ type WrapperType {
+ someMutation(input: SomeMutationInput!): SomeMutationPayload
+ }
+ """
+ )
+
+ def supports_mutations_returning_null():
+ def null_resolve(_info, **_input):
+ return None
+
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation", {}, {"result": GraphQLField(GraphQLInt)}, null_resolve
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"someMutation": {"result": None, "clientMutationId": "abc"}},
+ None,
+ )
+
+ @mark.asyncio
+ async def supports_async_mutations_returning_null():
+ async def null_resolve(_info, **_input):
+ return None
+
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation",
+ {},
+ {"result": GraphQLField(GraphQLInt)},
+ null_resolve,
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert await graphql(schema, source) == (
+ {"someMutation": {"result": None, "clientMutationId": "abc"}},
+ None,
+ )
+
+ def supports_mutations_returning_custom_classes():
+ class SomeClass:
+ @staticmethod
+ def get_some_generated_data():
+ return 1
+
+ @classmethod
+ def mutate(cls, _info, **_input):
+ return cls()
+
+ @classmethod
+ def resolve(cls, obj, _info):
+ assert isinstance(obj, cls)
+ return obj.get_some_generated_data()
+
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation",
+ {},
+ {"result": GraphQLField(GraphQLInt, resolve=SomeClass.resolve)},
+ SomeClass.mutate,
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"someMutation": {"result": 1, "clientMutationId": "abc"}},
+ None,
+ )
+
+ def supports_mutations_returning_mappings():
+ def dict_mutate(_info, **_input):
+ return {"some_data": 1}
+
+ def dict_resolve(obj, _info):
+ return obj["some_data"]
+
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation",
+ {},
+ {"result": GraphQLField(GraphQLInt, resolve=dict_resolve)},
+ dict_mutate,
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"someMutation": {"result": 1, "clientMutationId": "abc"}},
+ None,
+ )
+
+ @mark.asyncio
+ async def supports_async_mutations_returning_mappings():
+ async def dict_mutate(_info, **_input):
+ return {"some_data": 1}
+
+ async def dict_resolve(obj, _info):
+ return obj["some_data"]
+
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation",
+ {},
+ {"result": GraphQLField(GraphQLInt, resolve=dict_resolve)},
+ dict_mutate,
+ )
+ schema = wrap_in_schema({"someMutation": some_mutation})
+ source = """
+ mutation {
+ someMutation(input: {clientMutationId: "abc"}) {
+ result
+ clientMutationId
+ }
+ }
+ """
+ assert await graphql(schema, source) == (
+ {"someMutation": {"result": 1, "clientMutationId": "abc"}},
+ None,
+ )
+
+ def generates_correct_types():
+ some_mutation = mutation_with_client_mutation_id(
+ "SomeMutation",
+ description="Some Mutation Description",
+ input_fields={},
+ output_fields={"result": GraphQLField(GraphQLInt)},
+ mutate_and_get_payload=dummy_resolve,
+ deprecation_reason="Just because",
+ )
+
+ schema = wrap_in_schema({"someMutation": some_mutation})
+
+ assert print_schema(schema) == dedent(
+ '''
+ type Query {
+ dummy: Int
+ }
+
+ type Mutation {
+ """Some Mutation Description"""
+ someMutation(input: SomeMutationInput!): SomeMutationPayload @deprecated(reason: "Just because")
+ }
+
+ type SomeMutationPayload {
+ result: Int
+ clientMutationId: String
+ }
+
+ input SomeMutationInput {
+ clientMutationId: String
+ }
+ ''' # noqa: E501
+ )
diff --git a/tests/node/__init__.py b/tests/node/__init__.py
new file mode 100644
index 0000000..73e9152
--- /dev/null
+++ b/tests/node/__init__.py
@@ -0,0 +1 @@
+"""Tests for graphql_relay.node"""
diff --git a/tests/node/test_global.py b/tests/node/test_global.py
new file mode 100644
index 0000000..bbf6b3a
--- /dev/null
+++ b/tests/node/test_global.py
@@ -0,0 +1,216 @@
+from typing import Any, NamedTuple, Optional
+
+from pytest import fixture
+
+from graphql import (
+ graphql_sync,
+ GraphQLField,
+ GraphQLList,
+ GraphQLInt,
+ GraphQLObjectType,
+ GraphQLResolveInfo,
+ GraphQLSchema,
+ GraphQLString,
+)
+
+from graphql_relay import from_global_id, global_id_field, node_definitions
+
+
+class User(NamedTuple):
+ id: str
+ name: str
+
+
+class Photo(NamedTuple):
+ photo_id: str
+ width: int
+
+
+class Post(NamedTuple):
+ id: str
+ text: str
+
+
+@fixture(scope="module", params=["object_access", "dict_access"])
+def schema(request):
+ """Run each test with object access and dict access."""
+ use_dicts = request.param == "dict_access"
+
+ user_cls = dict if use_dicts else User
+ user_data = [
+ user_cls(id="1", name="John Doe"),
+ user_cls(id="2", name="Jane Smith"),
+ ]
+
+ photo_cls = dict if use_dicts else Photo
+ photo_data = [
+ photo_cls(photo_id="1", width=300),
+ photo_cls(photo_id="2", width=400),
+ ]
+
+ post_cls = dict if use_dicts else Post
+ post_data = [post_cls(id="1", text="lorem"), post_cls(id="2", text="ipsum")]
+
+ if use_dicts:
+
+ def get_node(global_id: str, info: GraphQLResolveInfo) -> Any:
+ assert info.schema is schema
+ type_, id_ = from_global_id(global_id)
+ if type_ == "User":
+ return next(filter(lambda obj: obj["id"] == id_, user_data), None)
+ if type_ == "Photo":
+ return next(
+ filter(lambda obj: obj["photo_id"] == id_, photo_data), None
+ )
+ if type_ == "Post":
+ return next(filter(lambda obj: obj["id"] == id_, post_data), None)
+ return None # pragma: no cover
+
+ def get_node_type(
+ obj: Any, info: GraphQLResolveInfo, _type: Any
+ ) -> Optional[str]:
+ assert info.schema is schema
+ if "name" in obj:
+ return user_type.name
+ if "photo_id" in obj:
+ return photo_type.name
+ if "text" in obj:
+ return post_type.name
+ return None # pragma: no cover
+
+ else:
+
+ def get_node(global_id: str, info: GraphQLResolveInfo) -> Any:
+ assert info.schema is schema
+ type_, id_ = from_global_id(global_id)
+ if type_ == "User":
+ return next(filter(lambda obj: obj.id == id_, user_data), None)
+ if type_ == "Photo":
+ return next(filter(lambda obj: obj.photo_id == id_, photo_data), None)
+ if type_ == "Post":
+ return next(filter(lambda obj: obj.id == id_, post_data), None)
+ return None # pragma: no cover
+
+ def get_node_type(
+ obj: Any, info: GraphQLResolveInfo, _type: Any
+ ) -> Optional[str]:
+ assert info.schema is schema
+ if isinstance(obj, User):
+ return user_type.name
+ if isinstance(obj, Photo):
+ return photo_type.name
+ if isinstance(obj, Post):
+ return post_type.name
+ return None # pragma: no cover
+
+ node_interface, node_field = node_definitions(get_node, get_node_type)[:2]
+
+ user_type = GraphQLObjectType(
+ "User",
+ fields=lambda: {
+ "id": global_id_field("User"),
+ "name": GraphQLField(GraphQLString),
+ },
+ interfaces=[node_interface],
+ )
+
+ photo_type = GraphQLObjectType(
+ "Photo",
+ fields=lambda: {
+ "id": global_id_field(
+ "Photo",
+ lambda obj, _info: obj["photo_id"] if use_dicts else obj.photo_id,
+ ),
+ "width": GraphQLField(GraphQLInt),
+ },
+ interfaces=[node_interface],
+ )
+
+ post_type = GraphQLObjectType(
+ "Post",
+ fields=lambda: {"id": global_id_field(), "text": GraphQLField(GraphQLString)},
+ interfaces=[node_interface],
+ )
+
+ query_type = GraphQLObjectType(
+ "Query",
+ fields=lambda: {
+ "node": node_field,
+ "allObjects": GraphQLField(
+ GraphQLList(node_interface),
+ resolve=lambda _root, _info: [*user_data, *photo_data, *post_data],
+ ),
+ },
+ )
+
+ schema = GraphQLSchema(query=query_type, types=[user_type, photo_type, post_type])
+
+ yield schema
+
+
+def describe_global_id_fields():
+ def gives_different_ids(schema):
+ source = """
+ {
+ allObjects {
+ id
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {
+ "allObjects": [
+ {"id": "VXNlcjox"},
+ {"id": "VXNlcjoy"},
+ {"id": "UGhvdG86MQ=="},
+ {"id": "UGhvdG86Mg=="},
+ {"id": "UG9zdDox"},
+ {"id": "UG9zdDoy"},
+ ]
+ },
+ None,
+ )
+
+ def allows_to_refetch_the_ids(schema):
+ source = """
+ {
+ user: node(id: "VXNlcjox") {
+ id
+ ... on User {
+ name
+ }
+ },
+ photo: node(id: "UGhvdG86MQ==") {
+ id
+ ... on Photo {
+ width
+ }
+ }
+ post: node(id: "UG9zdDox") {
+ id
+ ... on Post {
+ text
+ }
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {
+ "user": {"id": "VXNlcjox", "name": "John Doe"},
+ "photo": {"id": "UGhvdG86MQ==", "width": 300},
+ "post": {"id": "UG9zdDox", "text": "lorem"},
+ },
+ None,
+ )
+
+ def handles_valid_global_ids():
+ assert from_global_id("Zm9v") == ("", "foo")
+ assert from_global_id(b"Zm9v") == ("", "foo") # type: ignore
+ assert from_global_id("Zm9vOmJhcg==") == ("foo", "bar")
+ assert from_global_id(b"Zm9vOmJhcg==") == ("foo", "bar") # type: ignore
+
+ def handles_invalid_global_ids():
+ assert from_global_id("") == ("", "")
+ assert from_global_id("Og==") == ("", "")
+ assert from_global_id("bad!") == ("", "")
+ assert from_global_id("invalid") == ("", "")
diff --git a/tests/node/test_node.py b/tests/node/test_node.py
new file mode 100644
index 0000000..bec195f
--- /dev/null
+++ b/tests/node/test_node.py
@@ -0,0 +1,228 @@
+from itertools import chain
+from typing import Any, NamedTuple, Optional, Union
+
+from graphql import (
+ graphql_sync,
+ print_schema,
+ GraphQLField,
+ GraphQLID,
+ GraphQLInt,
+ GraphQLNonNull,
+ GraphQLObjectType,
+ GraphQLResolveInfo,
+ GraphQLSchema,
+ GraphQLString,
+)
+
+from graphql_relay import node_definitions
+
+from ..utils import dedent
+
+
+class User(NamedTuple):
+ id: str
+ name: str
+
+
+class Photo(NamedTuple):
+ id: str
+ width: int
+
+
+user_data = [User(id="1", name="John Doe"), User(id="2", name="Jane Smith")]
+
+photo_data = [Photo(id="3", width=300), Photo(id="4", width=400)]
+
+
+def get_node(id_: str, info: GraphQLResolveInfo) -> Optional[Union[User, Photo]]:
+ assert info.schema is schema
+ return next(
+ filter(
+ lambda obj: obj.id == id_, # type: ignore
+ chain(user_data, photo_data),
+ ),
+ None,
+ )
+
+
+def get_node_type(
+ obj: Union[User, Photo], info: GraphQLResolveInfo, _type: Any
+) -> Optional[str]:
+ assert info.schema is schema
+ if obj in user_data:
+ return user_type.name
+ if obj in photo_data:
+ return photo_type.name
+ return None # pragma: no cover
+
+
+node_interface, node_field, nodes_field = node_definitions(get_node, get_node_type)
+
+
+user_type = GraphQLObjectType(
+ "User",
+ lambda: {
+ "id": GraphQLField(GraphQLNonNull(GraphQLID)),
+ "name": GraphQLField(GraphQLString),
+ },
+ interfaces=[node_interface],
+)
+
+photo_type = GraphQLObjectType(
+ "Photo",
+ lambda: {
+ "id": GraphQLField(GraphQLNonNull(GraphQLID)),
+ "width": GraphQLField(GraphQLInt),
+ },
+ interfaces=[node_interface],
+)
+
+query_type = GraphQLObjectType(
+ "Query", lambda: {"node": node_field, "nodes": nodes_field}
+)
+
+schema = GraphQLSchema(query=query_type, types=[node_interface, user_type, photo_type])
+
+
+def describe_node_interface_and_fields():
+ def describe_ability_to_refetch():
+ def gets_the_correct_id_for_users():
+ source = """
+ {
+ node(id: "1") {
+ id
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == ({"node": {"id": "1"}}, None)
+
+ def gets_the_correct_name_for_users():
+ source = """
+ {
+ node(id: "1") {
+ id
+ ... on User {
+ name
+ }
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"node": {"id": "1", "name": "John Doe"}},
+ None,
+ )
+
+ def gets_the_correct_width_for_photos():
+ source = """
+ {
+ node(id: "4") {
+ id
+ ... on Photo {
+ width
+ }
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"node": {"id": "4", "width": 400}},
+ None,
+ )
+
+ def gets_the_correct_typename_for_users():
+ source = """
+ {
+ node(id: "1") {
+ id
+ __typename
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"node": {"id": "1", "__typename": "User"}},
+ None,
+ )
+
+ def gets_the_correct_typename_for_photos():
+ source = """
+ {
+ node(id: "4") {
+ id
+ __typename
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"node": {"id": "4", "__typename": "Photo"}},
+ None,
+ )
+
+ def ignores_photo_fragments_on_user():
+ source = """
+ {
+ node(id: "1") {
+ id
+ ... on Photo {
+ width
+ }
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == ({"node": {"id": "1"}}, None)
+
+ def returns_null_for_bad_ids():
+ source = """
+ {
+ node(id: "5") {
+ id
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == ({"node": None}, None)
+
+ def returns_nulls_for_bad_ids():
+ source = """
+ {
+ nodes(ids: ["3", "5"]) {
+ id
+ }
+ }
+ """
+ assert graphql_sync(schema, source) == (
+ {"nodes": [{"id": "3"}, None]},
+ None,
+ )
+
+ def generates_correct_types():
+ assert print_schema(schema) == dedent(
+ '''
+ """An object with an ID"""
+ interface Node {
+ """The id of the object."""
+ id: ID!
+ }
+
+ type User implements Node {
+ id: ID!
+ name: String
+ }
+
+ type Photo implements Node {
+ id: ID!
+ width: Int
+ }
+
+ type Query {
+ """Fetches an object given its ID"""
+ node(
+ """The ID of an object"""
+ id: ID!
+ ): Node
+
+ """Fetches objects given their IDs"""
+ nodes(
+ """The IDs of objects"""
+ ids: [ID!]!
+ ): [Node]!
+ }
+ '''
+ )
diff --git a/tests/node/test_node_async.py b/tests/node/test_node_async.py
new file mode 100644
index 0000000..5c5c571
--- /dev/null
+++ b/tests/node/test_node_async.py
@@ -0,0 +1,74 @@
+from typing import NamedTuple
+
+from pytest import mark
+
+from graphql import (
+ graphql,
+ GraphQLField,
+ GraphQLID,
+ GraphQLNonNull,
+ GraphQLObjectType,
+ GraphQLSchema,
+ GraphQLString,
+)
+
+from graphql_relay import node_definitions
+
+
+class User(NamedTuple):
+ id: str
+ name: str
+
+
+user_data = [User(id="1", name="John Doe"), User(id="2", name="Jane Smith")]
+
+user_type: GraphQLObjectType
+
+node_interface, node_field = node_definitions(
+ lambda id_, _info: next(filter(lambda obj: obj.id == id_, user_data), None),
+ lambda _obj, _info, _type: user_type.name,
+)[:2]
+
+
+user_type = GraphQLObjectType(
+ "User",
+ lambda: {
+ "id": GraphQLField(GraphQLNonNull(GraphQLID)),
+ "name": GraphQLField(GraphQLString),
+ },
+ interfaces=[node_interface],
+)
+
+query_type = GraphQLObjectType("Query", lambda: {"node": node_field})
+
+schema = GraphQLSchema(query=query_type, types=[user_type])
+
+
+def describe_node_interface_and_fields_with_async_object_fetcher():
+ @mark.asyncio
+ async def gets_the_correct_id_for_users():
+ source = """
+ {
+ node(id: "1") {
+ id
+ }
+ }
+ """
+ assert await graphql(schema, source) == ({"node": {"id": "1"}}, None)
+
+ @mark.asyncio
+ async def gets_the_correct_name_for_users():
+ source = """
+ {
+ node(id: "1") {
+ id
+ ... on User {
+ name
+ }
+ }
+ }
+ """
+ assert await graphql(schema, source) == (
+ {"node": {"id": "1", "name": "John Doe"}},
+ None,
+ )
diff --git a/tests/node/test_plural.py b/tests/node/test_plural.py
new file mode 100644
index 0000000..f16c098
--- /dev/null
+++ b/tests/node/test_plural.py
@@ -0,0 +1,102 @@
+from typing import NamedTuple
+
+from graphql import (
+ graphql_sync,
+ print_schema,
+ GraphQLField,
+ GraphQLObjectType,
+ GraphQLResolveInfo,
+ GraphQLSchema,
+ GraphQLString,
+)
+
+from graphql_relay import plural_identifying_root_field
+
+from ..utils import dedent
+
+user_type = GraphQLObjectType(
+ "User",
+ fields=lambda: {
+ "username": GraphQLField(GraphQLString),
+ "url": GraphQLField(GraphQLString),
+ },
+)
+
+
+class User(NamedTuple):
+ username: str
+ url: str
+
+
+def resolve_single_input(info: GraphQLResolveInfo, username: str) -> User:
+ assert info.schema is schema
+ lang = info.context.lang
+ url = f"www.facebook.com/{username}?lang={lang}"
+ return User(username=username, url=url)
+
+
+query_type = GraphQLObjectType(
+ "Query",
+ lambda: {
+ "usernames": plural_identifying_root_field(
+ "usernames",
+ description="Map from a username to the user",
+ input_type=GraphQLString,
+ output_type=user_type,
+ resolve_single_input=resolve_single_input,
+ )
+ },
+)
+
+schema = GraphQLSchema(query=query_type)
+
+
+class Context(NamedTuple):
+ lang: str
+
+
+def describe_plural_identifying_root_field():
+ def allows_fetching():
+ source = """
+ {
+ usernames(usernames:["dschafer", "leebyron", "schrockn"]) {
+ username
+ url
+ }
+ }
+ """
+ context_value = Context(lang="en")
+ assert graphql_sync(schema, source, context_value=context_value) == (
+ {
+ "usernames": [
+ {
+ "username": "dschafer",
+ "url": "www.facebook.com/dschafer?lang=en",
+ },
+ {
+ "username": "leebyron",
+ "url": "www.facebook.com/leebyron?lang=en",
+ },
+ {
+ "username": "schrockn",
+ "url": "www.facebook.com/schrockn?lang=en",
+ },
+ ]
+ },
+ None,
+ )
+
+ def generates_correct_types():
+ assert print_schema(schema) == dedent(
+ '''
+ type Query {
+ """Map from a username to the user"""
+ usernames(usernames: [String!]!): [User]
+ }
+
+ type User {
+ username: String
+ url: String
+ }
+ '''
+ )
diff --git a/tests/star_wars_data.py b/tests/star_wars_data.py
new file mode 100644
index 0000000..ea12eef
--- /dev/null
+++ b/tests/star_wars_data.py
@@ -0,0 +1,67 @@
+"""This defines a basic set of data for our Star Wars Schema.
+
+This data is hard coded for the sake of the demo, but you could imagine
+fetching this data from a backend service rather than from hardcoded
+JSON objects in a more complex demo.
+"""
+
+from typing import List, NamedTuple, Optional
+
+
+class Ship(NamedTuple):
+ id: str
+ name: str
+
+
+all_ships = [
+ Ship("1", "X-Wing"),
+ Ship("2", "Y-Wing"),
+ Ship("3", "A-Wing"),
+ # Yeah, technically it's Corellian. But it flew in the service of the rebels,
+ # so for the purposes of this demo it's a rebel ship.
+ Ship("4", "Millennium Falcon"),
+ Ship("5", "Home One"),
+ Ship("6", "TIE Fighter"),
+ Ship("7", "TIE Interceptor"),
+ Ship("8", "Executor"),
+]
+
+
+class Faction(NamedTuple):
+ id: str
+ name: str
+ ships: List[str]
+
+
+rebels = Faction("1", "Alliance to Restore the Republic", ["1", "2", "3", "4", "5"])
+
+empire = Faction("2", "Galactic Empire", ["6", "7", "8"])
+
+all_factions = [rebels, empire]
+
+
+def create_ship(ship_name: str, faction_id: str) -> Ship:
+ new_ship = Ship(str(len(all_ships) + 1), ship_name)
+ all_ships.append(new_ship)
+ faction = get_faction(faction_id)
+ if faction: # pragma: no cover else
+ faction.ships.append(new_ship.id)
+ return new_ship
+
+
+def get_ship(id_: str) -> Optional[Ship]:
+ return next(filter(lambda ship: ship.id == id_, all_ships), None) # type: ignore
+
+
+def get_faction(id_: str) -> Optional[Faction]:
+ return next(
+ filter(lambda faction: faction.id == id_, all_factions), None # type: ignore
+ )
+
+
+def get_rebels() -> Faction:
+ return rebels
+
+
+def get_empire() -> Faction:
+ return empire
diff --git a/tests/starwars/schema.py b/tests/star_wars_schema.py
similarity index 56%
rename from tests/starwars/schema.py
rename to tests/star_wars_schema.py
index b1f2910..73b9aaa 100644
--- a/tests/starwars/schema.py
+++ b/tests/star_wars_schema.py
@@ -1,39 +1,25 @@
-from graphql.type import (
+from graphql import (
GraphQLID,
GraphQLNonNull,
GraphQLObjectType,
- GraphQLInputObjectField,
+ GraphQLInputField,
GraphQLSchema,
GraphQLString,
- GraphQLField
+ GraphQLField,
)
-from graphql_relay.node.node import (
- node_definitions,
- global_id_field,
- from_global_id
-)
-
-from graphql_relay.connection.arrayconnection import (
- connection_from_list
-)
-
-from graphql_relay.connection.connection import (
- connection_args,
- connection_definitions
-)
-
-from graphql_relay.mutation.mutation import (
- mutation_with_client_mutation_id
-)
+from graphql_relay.node.node import node_definitions, global_id_field, from_global_id
+from graphql_relay.connection.array_connection import connection_from_array
+from graphql_relay.connection.connection import connection_args, connection_definitions
+from graphql_relay.mutation.mutation import mutation_with_client_mutation_id
-from .data import (
+from .star_wars_data import (
Faction,
- getFaction,
- getShip,
- getRebels,
- getEmpire,
- createShip,
+ get_faction,
+ get_ship,
+ get_rebels,
+ get_empire,
+ create_ship,
)
# This is a basic end-to-end test, designed to demonstrate the various
@@ -92,13 +78,13 @@ from .data import (
# }
#
# input IntroduceShipInput {
-# clientMutationId: string!
+# clientMutationId: string
# shipName: string!
# factionId: ID!
# }
#
-# input IntroduceShipPayload {
-# clientMutationId: string!
+# type IntroduceShipPayload {
+# clientMutationId: string
# ship: Ship
# faction: Faction
# }
@@ -115,22 +101,20 @@ from .data import (
def get_node(global_id, _info):
type_, id_ = from_global_id(global_id)
- if type_ == 'Faction':
- return getFaction(id_)
- elif type_ == 'Ship':
- return getShip(id_)
- else:
- return None
+ if type_ == "Faction":
+ return get_faction(id_)
+ if type_ == "Ship":
+ return get_ship(id_)
+ return None # pragma: no cover
-def get_node_type(obj, _info):
+def get_node_type(obj, _info, _type):
if isinstance(obj, Faction):
- return factionType
- else:
- return shipType
+ return faction_type.name
+ return ship_type.name
-node_interface, node_field = node_definitions(get_node, get_node_type)
+node_interface, node_field = node_definitions(get_node, get_node_type)[:2]
# We define our basic ship type.
@@ -140,17 +124,14 @@ node_interface, node_field = node_definitions(get_node, get_node_type)
# id: String!
# name: String
# }
-shipType = GraphQLObjectType(
- name='Ship',
- description='A ship in the Star Wars saga',
+ship_type = GraphQLObjectType(
+ name="Ship",
+ description="A ship in the Star Wars saga",
fields=lambda: {
- 'id': global_id_field('Ship'),
- 'name': GraphQLField(
- GraphQLString,
- description='The name of the ship.',
- )
+ "id": global_id_field("Ship"),
+ "name": GraphQLField(GraphQLString, description="The name of the ship."),
},
- interfaces=[node_interface]
+ interfaces=[node_interface],
)
# We define a connection between a faction and its ships.
@@ -167,7 +148,7 @@ shipType = GraphQLObjectType(
# cursor: String!
# node: Ship
# }
-shipEdge, shipConnection = connection_definitions('Ship', shipType)
+ship_edge, ship_connection = connection_definitions(ship_type, "Ship")
# We define our faction type, which implements the node interface.
#
@@ -177,25 +158,22 @@ shipEdge, shipConnection = connection_definitions('Ship', shipType)
# name: String
# ships: ShipConnection
# }
-factionType = GraphQLObjectType(
- name='Faction',
- description='A faction in the Star Wars saga',
+faction_type = GraphQLObjectType(
+ name="Faction",
+ description="A faction in the Star Wars saga",
fields=lambda: {
- 'id': global_id_field('Faction'),
- 'name': GraphQLField(
- GraphQLString,
- description='The name of the faction.',
- ),
- 'ships': GraphQLField(
- shipConnection,
- description='The ships used by the faction.',
+ "id": global_id_field("Faction"),
+ "name": GraphQLField(GraphQLString, description="The name of the faction."),
+ "ships": GraphQLField(
+ ship_connection,
+ description="The ships used by the faction.",
args=connection_args,
- resolver=lambda faction, _info, **args: connection_from_list(
- [getShip(ship) for ship in faction.ships], args
+ resolve=lambda faction, _info, **args: connection_from_array(
+ [get_ship(ship) for ship in faction.ships], args
),
- )
+ ),
},
- interfaces=[node_interface]
+ interfaces=[node_interface],
)
# This is the type that will be the root of our query, and the
@@ -207,19 +185,13 @@ factionType = GraphQLObjectType(
# empire: Faction
# node(id: String!): Node
# }
-queryType = GraphQLObjectType(
- name='Query',
+query_type = GraphQLObjectType(
+ name="Query",
fields=lambda: {
- 'rebels': GraphQLField(
- factionType,
- resolver=lambda _obj, _info: getRebels(),
- ),
- 'empire': GraphQLField(
- factionType,
- resolver=lambda _obj, _info: getEmpire(),
- ),
- 'node': node_field
- }
+ "rebels": GraphQLField(faction_type, resolve=lambda _obj, _info: get_rebels()),
+ "empire": GraphQLField(faction_type, resolve=lambda _obj, _info: get_empire()),
+ "node": node_field,
+ },
)
# This will return a GraphQLFieldConfig for our ship
@@ -227,55 +199,48 @@ queryType = GraphQLObjectType(
#
# It creates these two types implicitly:
# input IntroduceShipInput {
-# clientMutationId: string!
+# clientMutationId: string
# shipName: string!
# factionId: ID!
# }
#
-# input IntroduceShipPayload {
-# clientMutationId: string!
+# type IntroduceShipPayload {
+# clientMutationId: string
# ship: Ship
# faction: Faction
# }
-class IntroduceShipMutation(object):
+class IntroduceShipMutation:
+ # noinspection PyPep8Naming
def __init__(self, shipId, factionId, clientMutationId=None):
self.shipId = shipId
self.factionId = factionId
self.clientMutationId = clientMutationId
+# noinspection PyPep8Naming
def mutate_and_get_payload(_info, shipName, factionId, **_input):
- newShip = createShip(shipName, factionId)
- return IntroduceShipMutation(
- shipId=newShip.id,
- factionId=factionId,
- )
+ new_ship = create_ship(shipName, factionId)
+ return IntroduceShipMutation(shipId=new_ship.id, factionId=factionId)
-shipMutation = mutation_with_client_mutation_id(
- 'IntroduceShip',
+ship_mutation = mutation_with_client_mutation_id(
+ "IntroduceShip",
input_fields={
- 'shipName': GraphQLInputObjectField(
- GraphQLNonNull(GraphQLString)
- ),
- 'factionId': GraphQLInputObjectField(
- GraphQLNonNull(GraphQLID)
- )
+ "shipName": GraphQLInputField(GraphQLNonNull(GraphQLString)),
+ "factionId": GraphQLInputField(GraphQLNonNull(GraphQLID)),
},
output_fields={
- 'ship': GraphQLField(
- shipType,
- resolver=lambda payload, _info: getShip(payload.shipId)
+ "ship": GraphQLField(
+ ship_type, resolve=lambda payload, _info: get_ship(payload.shipId)
+ ),
+ "faction": GraphQLField(
+ faction_type, resolve=lambda payload, _info: get_faction(payload.factionId)
),
- 'faction': GraphQLField(
- factionType,
- resolver=lambda payload, _info: getFaction(payload.factionId)
- )
},
- mutate_and_get_payload=mutate_and_get_payload
+ mutate_and_get_payload=mutate_and_get_payload,
)
# This is the type that will be the root of our mutations, and the
@@ -285,16 +250,10 @@ shipMutation = mutation_with_client_mutation_id(
# type Mutation {
# introduceShip(input IntroduceShipInput!): IntroduceShipPayload
# }
-mutationType = GraphQLObjectType(
- 'Mutation',
- fields=lambda: {
- 'introduceShip': shipMutation
- }
+mutation_type = GraphQLObjectType(
+ "Mutation", fields=lambda: {"introduceShip": ship_mutation}
)
# Finally, we construct our schema (whose starting query type is the query
# type we defined above) and export it.
-StarWarsSchema = GraphQLSchema(
- query=queryType,
- mutation=mutationType
-)
+star_wars_schema = GraphQLSchema(query=query_type, mutation=mutation_type)
diff --git a/tests/starwars/__init__.py b/tests/starwars/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/starwars/data.py b/tests/starwars/data.py
deleted file mode 100644
index a979007..0000000
--- a/tests/starwars/data.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""This defines a basic set of data for our Star Wars Schema.
-
-This data is hard coded for the sake of the demo, but you could imagine
-fetching this data from a backend service rather than from hardcoded
-JSON objects in a more complex demo.
-"""
-
-from collections import namedtuple
-
-Ship = namedtuple('Ship', ['id', 'name'])
-Faction = namedtuple('Faction', ['id', 'name', 'ships'])
-
-xwing = Ship(
- id='1',
- name='X-Wing',
-)
-
-ywing = Ship(
- id='2',
- name='Y-Wing',
-)
-
-awing = Ship(
- id='3',
- name='A-Wing',
-)
-
-# Yeah, technically it's Corellian. But it flew in the service of the rebels,
-# so for the purposes of this demo it's a rebel ship.
-falcon = Ship(
- id='4',
- name='Millenium Falcon',
-)
-
-homeOne = Ship(
- id='5',
- name='Home One',
-)
-
-tieFighter = Ship(
- id='6',
- name='TIE Fighter',
-)
-
-tieInterceptor = Ship(
- id='7',
- name='TIE Interceptor',
-)
-
-executor = Ship(
- id='8',
- name='Executor',
-)
-
-rebels = Faction(
- id='1',
- name='Alliance to Restore the Republic',
- ships=['1', '2', '3', '4', '5']
-)
-
-empire = Faction(
- id='2',
- name='Galactic Empire',
- ships=['6', '7', '8']
-)
-
-data = {
- 'Faction': {
- '1': rebels,
- '2': empire
- },
- 'Ship': {
- '1': xwing,
- '2': ywing,
- '3': awing,
- '4': falcon,
- '5': homeOne,
- '6': tieFighter,
- '7': tieInterceptor,
- '8': executor
- }
-}
-
-
-def createShip(shipName, factionId):
- nextShip = len(data['Ship']) + 1
- newShip = Ship(
- id=str(nextShip),
- name=shipName
- )
- data['Ship'][newShip.id] = newShip
- data['Faction'][factionId].ships.append(newShip.id)
- return newShip
-
-
-def getShip(_id):
- return data['Ship'][_id]
-
-
-def getFaction(_id):
- return data['Faction'][_id]
-
-
-def getRebels():
- return rebels
-
-
-def getEmpire():
- return empire
diff --git a/tests/starwars/test_connections.py b/tests/starwars/test_connections.py
deleted file mode 100644
index 99e5916..0000000
--- a/tests/starwars/test_connections.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from graphql import graphql
-
-from .schema import StarWarsSchema
-
-
-def test_correct_fetch_first_ship_rebels():
- query = '''
- query RebelsShipsQuery {
- rebels {
- name,
- ships(first: 1) {
- edges {
- node {
- name
- }
- }
- }
- }
- }
- '''
- expected = {
- 'rebels': {
- 'name': 'Alliance to Restore the Republic',
- 'ships': {
- 'edges': [
- {
- 'node': {
- 'name': 'X-Wing'
- }
- }
- ]
- }
- }
- }
- result = graphql(StarWarsSchema, query)
- assert not result.errors
- assert result.data == expected
diff --git a/tests/starwars/test_mutations.py b/tests/starwars/test_mutations.py
deleted file mode 100644
index a4194f9..0000000
--- a/tests/starwars/test_mutations.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from graphql import graphql
-
-from .schema import StarWarsSchema
-
-
-def test_correctly_mutates_dataset():
- query = '''
- mutation AddBWingQuery($input: IntroduceShipInput!) {
- introduceShip(input: $input) {
- ship {
- id
- name
- }
- faction {
- name
- }
- clientMutationId
- }
- }
- '''
- params = {
- 'input': {
- 'shipName': 'B-Wing',
- 'factionId': '1',
- 'clientMutationId': 'abcde',
- }
- }
- expected = {
- 'introduceShip': {
- 'ship': {
- 'id': 'U2hpcDo5',
- 'name': 'B-Wing'
- },
- 'faction': {
- 'name': 'Alliance to Restore the Republic'
- },
- 'clientMutationId': 'abcde',
- }
- }
- result = graphql(StarWarsSchema, query, variables=params)
- assert not result.errors
- assert result.data == expected
diff --git a/tests/starwars/test_objectidentification.py b/tests/starwars/test_objectidentification.py
deleted file mode 100644
index b579877..0000000
--- a/tests/starwars/test_objectidentification.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from graphql import graphql
-
-from .schema import StarWarsSchema
-
-
-def test_correctly_fetches_id_name_rebels():
- query = '''
- query RebelsQuery {
- rebels {
- id
- name
- }
- }
- '''
- expected = {
- 'rebels': {
- 'id': 'RmFjdGlvbjox',
- 'name': 'Alliance to Restore the Republic'
- }
- }
- result = graphql(StarWarsSchema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_correctly_refetches_rebels():
- query = '''
- query RebelsRefetchQuery {
- node(id: "RmFjdGlvbjox") {
- id
- ... on Faction {
- name
- }
- }
- }
- '''
- expected = {
- 'node': {
- 'id': 'RmFjdGlvbjox',
- 'name': 'Alliance to Restore the Republic'
- }
- }
- result = graphql(StarWarsSchema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_correctly_fetches_id_name_empire():
- query = '''
- query EmpireQuery {
- empire {
- id
- name
- }
- }
- '''
- expected = {
- 'empire': {
- 'id': 'RmFjdGlvbjoy',
- 'name': 'Galactic Empire'
- }
- }
- result = graphql(StarWarsSchema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_correctly_refetches_empire():
- query = '''
- query EmpireRefetchQuery {
- node(id: "RmFjdGlvbjoy") {
- id
- ... on Faction {
- name
- }
- }
- }
- '''
- expected = {
- 'node': {
- 'id': 'RmFjdGlvbjoy',
- 'name': 'Galactic Empire'
- }
- }
- result = graphql(StarWarsSchema, query)
- assert not result.errors
- assert result.data == expected
-
-
-def test_correctly_refetches_xwing():
- query = '''
- query XWingRefetchQuery {
- node(id: "U2hpcDox") {
- id
- ... on Ship {
- name
- }
- }
- }
- '''
- expected = {
- 'node': {
- 'id': 'U2hpcDox',
- 'name': 'X-Wing'
- }
- }
- result = graphql(StarWarsSchema, query)
- assert not result.errors
- assert result.data == expected
diff --git a/tests/test_star_wars_connections.py b/tests/test_star_wars_connections.py
new file mode 100644
index 0000000..f4dd855
--- /dev/null
+++ b/tests/test_star_wars_connections.py
@@ -0,0 +1,191 @@
+from graphql import graphql_sync
+
+from .star_wars_schema import star_wars_schema as schema
+
+
+def describe_star_wars_connections():
+ def fetches_the_first_ship_of_the_rebels():
+ source = """
+ {
+ rebels {
+ name,
+ ships(first: 1) {
+ edges {
+ node {
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+ expected = {
+ "rebels": {
+ "name": "Alliance to Restore the Republic",
+ "ships": {"edges": [{"node": {"name": "X-Wing"}}]},
+ }
+ }
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def fetches_the_first_two_ships_of_the_rebels_with_a_cursor():
+ source = """
+ {
+ rebels {
+ name,
+ ships(first: 2) {
+ edges {
+ cursor,
+ node {
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+ expected = {
+ "rebels": {
+ "name": "Alliance to Restore the Republic",
+ "ships": {
+ "edges": [
+ {
+ "cursor": "YXJyYXljb25uZWN0aW9uOjA=",
+ "node": {"name": "X-Wing"},
+ },
+ {
+ "cursor": "YXJyYXljb25uZWN0aW9uOjE=",
+ "node": {"name": "Y-Wing"},
+ },
+ ]
+ },
+ }
+ }
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def fetches_the_next_three_ships_of_the_rebels_with_a_cursor():
+ source = """
+ {
+ rebels {
+ name,
+ ships(first: 3 after: "YXJyYXljb25uZWN0aW9uOjE=") {
+ edges {
+ cursor,
+ node {
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+ expected = {
+ "rebels": {
+ "name": "Alliance to Restore the Republic",
+ "ships": {
+ "edges": [
+ {
+ "cursor": "YXJyYXljb25uZWN0aW9uOjI=",
+ "node": {"name": "A-Wing"},
+ },
+ {
+ "cursor": "YXJyYXljb25uZWN0aW9uOjM=",
+ "node": {"name": "Millennium Falcon"},
+ },
+ {
+ "cursor": "YXJyYXljb25uZWN0aW9uOjQ=",
+ "node": {"name": "Home One"},
+ },
+ ]
+ },
+ }
+ }
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def fetches_no_ships_of_the_rebels_at_the_end_of_connection():
+ source = """
+ {
+ rebels {
+ name,
+ ships(first: 3 after: "YXJyYXljb25uZWN0aW9uOjQ=") {
+ edges {
+ cursor,
+ node {
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+ expected = {
+ "rebels": {
+ "name": "Alliance to Restore the Republic",
+ "ships": {"edges": []},
+ }
+ }
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def identifies_the_end_of_the_list():
+ source = """
+ {
+ rebels {
+ name,
+ originalShips: ships(first: 2) {
+ edges {
+ node {
+ name
+ }
+ }
+ pageInfo {
+ hasNextPage
+ }
+ }
+ moreShips: ships(first: 3 after: "YXJyYXljb25uZWN0aW9uOjE=") {
+ edges {
+ node {
+ name
+ }
+ }
+ pageInfo {
+ hasNextPage
+ }
+ }
+ }
+ }
+ """
+ expected = {
+ "rebels": {
+ "name": "Alliance to Restore the Republic",
+ "originalShips": {
+ "edges": [
+ {
+ "node": {"name": "X-Wing"},
+ },
+ {
+ "node": {"name": "Y-Wing"},
+ },
+ ],
+ "pageInfo": {"hasNextPage": True},
+ },
+ "moreShips": {
+ "edges": [
+ {
+ "node": {"name": "A-Wing"},
+ },
+ {
+ "node": {"name": "Millennium Falcon"},
+ },
+ {
+ "node": {"name": "Home One"},
+ },
+ ],
+ "pageInfo": {"hasNextPage": False},
+ },
+ },
+ }
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
diff --git a/tests/test_star_wars_mutations.py b/tests/test_star_wars_mutations.py
new file mode 100644
index 0000000..46daec3
--- /dev/null
+++ b/tests/test_star_wars_mutations.py
@@ -0,0 +1,37 @@
+from graphql import graphql_sync
+
+from .star_wars_schema import star_wars_schema as schema
+
+
+def describe_star_wars_mutations():
+ def correctly_mutates_dataset():
+ source = """
+ mutation ($input: IntroduceShipInput!) {
+ introduceShip(input: $input) {
+ ship {
+ id
+ name
+ }
+ faction {
+ name
+ }
+ clientMutationId
+ }
+ }
+ """
+ variable_values = {
+ "input": {
+ "shipName": "B-Wing",
+ "factionId": "1",
+ "clientMutationId": "abcde",
+ }
+ }
+ expected = {
+ "introduceShip": {
+ "ship": {"id": "U2hpcDo5", "name": "B-Wing"},
+ "faction": {"name": "Alliance to Restore the Republic"},
+ "clientMutationId": "abcde",
+ }
+ }
+ result = graphql_sync(schema, source, variable_values=variable_values)
+ assert result == (expected, None)
diff --git a/tests/test_star_wars_object_identification.py b/tests/test_star_wars_object_identification.py
new file mode 100644
index 0000000..9aa5e8d
--- /dev/null
+++ b/tests/test_star_wars_object_identification.py
@@ -0,0 +1,80 @@
+from graphql import graphql_sync
+
+from .star_wars_schema import star_wars_schema as schema
+
+
+def describe_star_wars_object_identification():
+ def fetches_the_id_and_name_of_the_rebels():
+ source = """
+ {
+ rebels {
+ id
+ name
+ }
+ }
+ """
+ expected = {
+ "rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
+ }
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def fetches_the_rebels_by_global_id():
+ source = """
+ {
+ node(id: "RmFjdGlvbjox") {
+ id
+ ... on Faction {
+ name
+ }
+ }
+ }
+ """
+ expected = {
+ "node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
+ }
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def fetches_the_id_and_name_of_the_empire():
+ source = """
+ {
+ empire {
+ id
+ name
+ }
+ }
+ """
+ expected = {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def fetches_the_empire_by_global_id():
+ source = """
+ {
+ node(id: "RmFjdGlvbjoy") {
+ id
+ ... on Faction {
+ name
+ }
+ }
+ }
+ """
+ expected = {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
+
+ def fetches_the_x_wing_by_global_id():
+ source = """
+ {
+ node(id: "U2hpcDox") {
+ id
+ ... on Ship {
+ name
+ }
+ }
+ }
+ """
+ expected = {"node": {"id": "U2hpcDox", "name": "X-Wing"}}
+ result = graphql_sync(schema, source)
+ assert result == (expected, None)
diff --git a/tests/test_version.py b/tests/test_version.py
new file mode 100644
index 0000000..e2144a9
--- /dev/null
+++ b/tests/test_version.py
@@ -0,0 +1,109 @@
+import re
+
+import graphql_relay
+from graphql_relay.version import (
+ VersionInfo,
+ version,
+ version_info,
+ version_js,
+ version_info_js,
+)
+
+_re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(?:(a|b|r?c)(\d+))?$")
+
+
+def describe_version():
+ def describe_version_info_class():
+ def create_version_info_from_fields():
+ v = VersionInfo(1, 2, 3, "alpha", 4)
+ assert v.major == 1
+ assert v.minor == 2
+ assert v.micro == 3
+ assert v.releaselevel == "alpha"
+ assert v.serial == 4
+
+ def create_version_info_from_str():
+ v = VersionInfo.from_str("1.2.3")
+ assert v.major == 1
+ assert v.minor == 2
+ assert v.micro == 3
+ assert v.releaselevel == "final"
+ assert v.serial == 0
+ v = VersionInfo.from_str("1.2.3a4")
+ assert v.major == 1
+ assert v.minor == 2
+ assert v.micro == 3
+ assert v.releaselevel == "alpha"
+ assert v.serial == 4
+ v = VersionInfo.from_str("1.2.3beta4")
+ assert v.major == 1
+ assert v.minor == 2
+ assert v.micro == 3
+ assert v.releaselevel == "beta"
+ assert v.serial == 4
+ v = VersionInfo.from_str("12.34.56rc789")
+ assert v.major == 12
+ assert v.minor == 34
+ assert v.micro == 56
+ assert v.releaselevel == "candidate"
+ assert v.serial == 789
+
+ def serialize_as_str():
+ v = VersionInfo(1, 2, 3, "final", 0)
+ assert str(v) == "1.2.3"
+ v = VersionInfo(1, 2, 3, "alpha", 4)
+ assert str(v) == "1.2.3a4"
+ v = VersionInfo(1, 2, 3, "candidate", 4)
+ assert str(v) == "1.2.3rc4"
+
+ def describe_graphql_core_version():
+ def base_package_has_correct_version():
+ assert graphql_relay.__version__ == version
+ assert graphql_relay.version == version
+
+ def base_package_has_correct_version_info():
+ assert graphql_relay.__version_info__ is version_info
+ assert graphql_relay.version_info is version_info
+
+ def version_has_correct_format():
+ assert isinstance(version, str)
+ assert _re_version.match(version)
+
+ def version_info_has_correct_fields():
+ assert isinstance(version_info, tuple)
+ assert str(version_info) == version
+ groups = _re_version.match(version).groups() # type: ignore
+ assert version_info.major == int(groups[0])
+ assert version_info.minor == int(groups[1])
+ assert version_info.micro == int(groups[2])
+ if groups[3] is None: # pragma: no cover
+ assert groups[4] is None
+ else: # pragma: no cover
+ assert version_info.releaselevel[:1] == groups[3].lstrip("r")
+ assert version_info.serial == int(groups[4])
+
+ def describe_graphql_js_version():
+ def base_package_has_correct_version_js():
+ assert graphql_relay.__version_js__ == version_js
+ assert graphql_relay.version_js == version_js
+
+ def base_package_has_correct_version_info_js():
+ assert graphql_relay.__version_info_js__ is version_info_js
+ assert graphql_relay.version_info_js is version_info_js
+
+ def version_js_has_correct_format():
+ assert isinstance(version_js, str)
+ assert _re_version.match(version_js)
+
+ def version_info_js_has_correct_fields():
+ assert isinstance(version_info_js, tuple)
+ assert str(version_info_js) == version_js
+ groups = _re_version.match(version_js).groups() # type: ignore
+ assert version_info_js.major == int(groups[0])
+ assert version_info_js.minor == int(groups[1])
+ assert version_info_js.micro == int(groups[2])
+ if groups[3] is None: # pragma: no cover
+ assert groups[4] is None
+ else: # pragma: no cover
+ assert version_info_js.releaselevel[:1] == groups[3].lstrip("r")
+ assert version_info_js.serial == int(groups[4])
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py
new file mode 100644
index 0000000..a829568
--- /dev/null
+++ b/tests/utils/__init__.py
@@ -0,0 +1,5 @@
+"""Tests for graphql_relay.utils and test utilities"""
+
+from .dedent import dedent
+
+__all__ = ["dedent"]
diff --git a/tests/utils/dedent.py b/tests/utils/dedent.py
new file mode 100644
index 0000000..a65c2d9
--- /dev/null
+++ b/tests/utils/dedent.py
@@ -0,0 +1,8 @@
+from textwrap import dedent as _dedent
+
+__all__ = ["dedent"]
+
+
+def dedent(text: str) -> str:
+ """Fix indentation and also trim given text string."""
+ return _dedent(text.lstrip("\n").rstrip(" \t\n"))
diff --git a/tests/utils/test_base64.py b/tests/utils/test_base64.py
new file mode 100644
index 0000000..b8deadf
--- /dev/null
+++ b/tests/utils/test_base64.py
@@ -0,0 +1,33 @@
+from graphql_relay.utils import base64, unbase64
+
+
+example_unicode = "Some examples: ͢❤😀"
+example_base64 = "U29tZSBleGFtcGxlczogIM2i4p2k8J+YgA=="
+
+
+def describe_base64_conversion():
+ def converts_from_unicode_to_base64():
+ assert base64(example_unicode) == example_base64
+
+ def converts_from_base64_to_unicode():
+ assert unbase64(example_base64) == example_unicode
+
+ def converts_invalid_base64_to_empty_string():
+ assert unbase64("") == ""
+ assert unbase64("invalid") == ""
+ assert unbase64(example_base64[-1:]) == ""
+ assert unbase64(example_base64[1:]) == ""
+ assert unbase64("!" + example_base64[1:]) == ""
+ assert unbase64("Ü" + example_base64[1:]) == ""
+
+ def converts_from_unicode_as_bytes_to_base64():
+ bytes_example_code = example_unicode.encode("utf-8")
+ assert base64(bytes_example_code) == example_base64 # type: ignore
+ bytearray_example_code = bytearray(bytes_example_code)
+ assert base64(bytearray_example_code) == example_base64 # type: ignore
+
+ def converts_from_base64_as_bytes_to_unicode():
+ bytes_example_code = example_base64.encode("ascii")
+ assert unbase64(bytes_example_code) == example_unicode # type: ignore
+ bytearray_example_code = bytearray(bytes_example_code)
+ assert unbase64(bytearray_example_code) == example_unicode # type: ignore
diff --git a/tests/utils/test_dedent.py b/tests/utils/test_dedent.py
new file mode 100644
index 0000000..7084960
--- /dev/null
+++ b/tests/utils/test_dedent.py
@@ -0,0 +1,98 @@
+from . import dedent
+
+
+def describe_dedent():
+ def removes_indentation_in_typical_usage():
+ assert (
+ dedent(
+ """
+ type Query {
+ me: User
+ }
+
+ type User {
+ id: ID
+ name: String
+ }
+ """
+ )
+ == "type Query {\n me: User\n}\n\n"
+ "type User {\n id: ID\n name: String\n}"
+ )
+
+ def removes_only_the_first_level_of_indentation():
+ assert (
+ dedent(
+ """
+ first
+ second
+ third
+ fourth
+ """
+ )
+ == "first\n second\n third\n fourth"
+ )
+
+ def does_not_escape_special_characters():
+ assert (
+ dedent(
+ """
+ type Root {
+ field(arg: String = "wi\th de\fault"): String
+ }
+ """
+ )
+ == "type Root {\n"
+ ' field(arg: String = "wi\th de\fault"): String\n}'
+ )
+
+ def also_removes_indentation_using_tabs():
+ assert (
+ dedent(
+ """
+ \t\t type Query {
+ \t\t me: User
+ \t\t }
+ """
+ )
+ == "type Query {\n me: User\n}"
+ )
+
+ def removes_leading_and_trailing_newlines():
+ assert (
+ dedent(
+ """
+
+
+ type Query {
+ me: User
+ }
+
+
+ """
+ )
+ == "type Query {\n me: User\n}"
+ )
+
+ def removes_all_trailing_spaces_and_tabs():
+ assert (
+ dedent(
+ """
+ type Query {
+ me: User
+ }
+ \t\t \t """
+ )
+ == "type Query {\n me: User\n}"
+ )
+
+ def works_on_text_without_leading_newline():
+ assert (
+ dedent(
+ """ type Query {
+ me: User
+ }
+ """
+ )
+ == "type Query {\n me: User\n}"
+ )
diff --git a/tox.ini b/tox.ini
index 596e2bd..07d445a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,24 +1,57 @@
[tox]
-envlist = py{27,34,35,36,37,38,py,py3}, flake8, manifest
+envlist = py3{6,7,8,9,10}, black, flake8, mypy, manifest, core320
+isolated_build = true
+
+[gh-actions]
+python =
+ 3.6: py36
+ 3.7: py37
+ 3.8: py38
+ 3.9: py39
+ 3.10: py310
+
+[testenv:black]
+basepython = python3.9
+deps = black==22.3.0
+commands =
+ black src tests setup.py -t py39 --check
[testenv:flake8]
-basepython = python3.7
-deps = flake8>=3.7,<4
+basepython = python3.9
+deps = flake8>=4,<5
commands =
- flake8 setup.py graphql_relay tests
+ flake8 src tests setup.py
-[testenv:pypy]
-whitelist_externals=*
-
-[testenv:pypy3]
-whitelist_externals=*
+[testenv:mypy]
+basepython = python3.9
+deps =
+ mypy==0.942
+ pytest>=6.2,<7
+commands =
+ mypy src tests
[testenv:manifest]
-basepython = python3.7
-deps = check-manifest>=0.40,<1
+basepython = python3.9
+deps = check-manifest>=0.48,<1
commands =
check-manifest -v
+[testenv:core320]
+basepython = python3.9
+deps =
+ graphql-core==3.2.0
+ pytest>=6.2,<7
+ pytest-asyncio>=0.16,<1
+ pytest-describe>=2,<3
+commands =
+ pytest tests {posargs}
+
[testenv]
-commands=
- python setup.py test -a "{posargs}"
+deps =
+ pytest>=6.2,<7
+ pytest-asyncio>=0.16,<1
+ pytest-cov>=3,<4
+ pytest-describe>=2,<3
+ py36,py37: typing-extensions>=4.1,<5
+commands =
+ pytest tests {posargs: --cov-report=term-missing --cov=graphql_relay --cov=tests --cov-fail-under=100}
More details
Historical runs
- missing-python-module: Missing python module: graphql.resolve_thunk
- push-failed: Failed to push result branch: Connection closed: Connection closed early The remote server unexpectedly closed the connection.
- push-failed: Failed to push result branch: Connection closed: Connection closed early The remote server unexpectedly closed the connection.
- missing-python-module: Missing python module: graphql.resolve_thunk
- worker-timeout: No keepalives received in 1:02:00.949269.
- run-disappeared: Worker started processing new run rather than e4400fb9-5d08-4a89-bf51-f1a9f90cf867
- missing-python-module: Missing python module: graphql.GraphQLArgumentMap