diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..2a2f56e
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,62 @@
+# This is a basic workflow to help you get started with Actions
+
+name: Unittests
+
+# Controls when the action will run.
+on:
+ # Triggers the workflow on push or pull request events but only for the master branch
+ push:
+ branches: [ master, develop ]
+ pull_request:
+ branches: [ master, develop ]
+
+ # Allows you to run this workflow manually from the Actions tab
+ workflow_dispatch:
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+
+ # This workflow contains a single job called "build"
+ build:
+ environment: Master
+ # The type of runner that the job will run on
+ runs-on: ubuntu-latest
+
+ # Steps represent a sequence of tasks that will be executed as part of the job
+ steps:
+ # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+      - uses: actions/checkout@v4
+ - name: Generate env vars from gh secrets
+ run: |
+ echo TEMPLATE_ROOT_DIR=${{ secrets.TEMPLATE_ROOT_DIR }} >> .env
+ echo FAUNA_SECRET=${{ secrets.FAUNA_SECRET }} >> .env
+ echo FAUNA_SCHEME=${{ secrets.FAUNA_SCHEME }} >> .env
+ echo FAUNA_DOMAIN=${{ secrets.FAUNA_DOMAIN }} >> .env
+          echo FAUNA_PORT=${{ secrets.FAUNA_PORT }} >> .env
+          echo FAUNA_GRAPHQL_IMPORT_URL=${{ secrets.FAUNA_GRAPHQL_IMPORT_URL }} >> .env
+          echo FAUNA_GRAPHQL_URL=${{ secrets.FAUNA_GRAPHQL_URL }} >> .env
+ echo DEFAULT_FROM_EMAIL=${{ secrets.DEFAULT_FROM_EMAIL }} >> .env
+ echo PROJECT_NAME=${{ secrets.PROJECT_NAME }} >> .env
+ echo KEY_MODULE=${{ secrets.KEY_MODULE }} >> .env
+ cat .env
+ - run: docker-compose pull
+
+ # In this step, this action saves a list of existing images,
+ # the cache is created without them in the post run.
+ # It also restores the cache if it exists.
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+
+ - run: docker volume create --name=pfunk-fauna-data
+ - run: docker-compose build
+
+ # Runs a single command using the runners shell
+ - name: Run Unit Tests
+ run: docker-compose run web poetry run python -m unittest
+ - name: Build and publish to pypi
+ if: github.ref == 'refs/heads/master'
+ uses: JRubics/poetry-publish@v1.13
+ with:
+ pypi_token: ${{ secrets.PYPI_TOKEN }}
+ ignore_dev_requirements: "yes"
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b4214fe..a703a12 100644
--- a/.gitignore
+++ b/.gitignore
@@ -132,3 +132,4 @@ dmypy.json
# Pyre type checker
.pyre/
/pfunk/carcamp/
+/notebooks/
diff --git a/Dockerfile b/Dockerfile
index 4b7920a..7f67ec2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,5 @@
FROM capless/capless-docker:jupyter
+RUN pip install --upgrade pip
COPY . /code
RUN poetry run pip install --upgrade pip
RUN poetry install
diff --git a/README.md b/README.md
index db4a26a..7143deb 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Includes GraphQL and generic ABAC auth workflow integrations.
- [Getting Started](#Getting-Started)
- [Installation](#Installation)
+ - [Environment Variables](#environment-variables)
- [Setup the Connection](#setup-the-connection)
- [Define your Collections](#define-your-collections-collectionspy)
- [Choose an Auth Workflow](#auth-workflows)
@@ -29,7 +30,9 @@ Includes GraphQL and generic ABAC auth workflow integrations.
- [Save Some Data](#save-some-data)
- [Query Your Data](#query-your-data)
- [Delete a Record](#delete-a-record)
-
+- [Customizing your setup](#customizing-your-setup)
+ - [Option 1: Environment Variables](#option-1-environment-variables)
+ - [Option 2: Inline Field](#option-2-inline-field)
### Getting Started
@@ -37,6 +40,11 @@ Includes GraphQL and generic ABAC auth workflow integrations.
### Installation
```pip install pfunk```
+### Environment Variables
+
+- **FAUNA_SECRET** - Fauna admin or server key.
+- **FAUNA_SCHEME** - (optional) HTTP scheme to use (default: https)
+- **FAUNA_DOMAIN** - (optional) Fauna domain to connect to (e.g. `db.fauna.com`, or your local Fauna Docker hostname)
### Setup the Connection
#### Using Environment Variables (Preferred Method)
@@ -207,3 +215,120 @@ Let's delete the record from above.
```python
product.delete()
```
+
+
+
+### Customizing your setup
+This section is for customizing your `user` and `group` collections to your liking. e.g.
+- Renaming your `User` and `Group` collections to names better suited to your domain, e.g. `Agent` (User) and `Firm` (Group)
+- Subclassing the `user` or `group` collection in order to have more control to what kind of auth collection you want to have
+
+
+### Custom User and Groups
+Initially, we have multiple ways of defining your custom user-group collections.
+Things to keep in mind:
+- `UserGroup` class **must** be subclassed and reference to the custom `user` and `group`
+- Permissions work the same way with custom user and group collections. This just gives you the ability to fully customize your own auth collections.
+
+
+
+### Option 1: Environment Variables
+This is the easiest way. Just go to your `.env` file and define:
+```
+USER_COLLECTION=Newuser # Class name of your custom user class - case-sensitive!
+GROUP_COLLECTION=Newgroup # Class name of your custom group class - case-sensitive!
+GROUP_COLLECTION_DIR=dir.to.Newgroup # class dir to import your custom group
+USER_COLLECTION_DIR=dir.to.Newuser # class dir to import your custom user
+```
+Then you'll end up with this in your `collections.py`
+```python
+from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug
+from pfunk.contrib.auth.resources import GenericUserBasedRole
+
+
+class UserGroups(ug):
+ userID = ReferenceField('dir.to.Newuser')
+ groupID = ReferenceField('dir.to.Newgroup')
+
+
+class Newgroup(BaseGroup):
+ users = ManyToManyField('dir.to.Newuser', relation_name='custom_users_groups')
+
+
+class Newuser(ExtendedUser):
+ user_group_class = import_util('dir.to.UserGroups')
+ group_class = import_util('dir.to.Newgroup')
+ groups = ManyToManyField('dir.to.Newgroup', relation_name='custom_users_groups')
+ blogs = ManyToManyField('dir.to.Blog', relation_name='users_blogs')
+
+
+class Blog(Collection):
+ collection_roles = [GenericUserBasedRole]
+ title = StringField(required=True)
+ content = StringField(required=True)
+ user = ReferenceField('dir.to.Newuser', relation_name='users_blogs')
+
+ def __unicode__(self):
+ return self.title
+
+```
+
+
+
+### Option 2: Inline Field
+If for some reason you don't want to use the environment variables, you can define the needed fields
+directly in your `Collection`. This is what we use in PFunk's unit tests, refer to it if you
+need more usage but essentially:
+```python
+class Blog(Collection):
+ user_collection = 'Newuser'
+ group_collection = 'Newgroup'
+ user_collection_dir = 'dir.to.Newuser'
+ group_collection_dir = 'dir.to.Newgroup'
+ ...
+```
+
+
+Generally, this is how your `collections.py` will look in the end if you want to define
+your custom auth collections in fields.
+
+
+```python
+from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug
+from pfunk.contrib.auth.resources import GenericUserBasedRole
+
+
+class UserGroups(ug):
+    userID = ReferenceField('this.file.Newuser')
+ groupID = ReferenceField('this.file.Newgroup')
+
+
+class Newgroup(BaseGroup):
+ users = ManyToManyField('this.file.Newuser', relation_name='custom_users_groups')
+
+
+class Newuser(ExtendedUser):
+ user_group_class = import_util('this.file.UserGroups')
+ group_class = import_util('this.file.Newgroup')
+ groups = ManyToManyField('this.file.Newgroup', relation_name='custom_users_groups')
+ blogs = ManyToManyField('this.file.Blog',
+ relation_name='users_blogs')
+
+ group_collection = 'Newgroup'
+
+
+class Blog(Collection):
+ collection_roles = [GenericUserBasedRole]
+ title = StringField(required=True)
+ content = StringField(required=True)
+ user = ReferenceField('this.file.Newuser', relation_name='users_blogs')
+
+ user_collection = 'Newuser'
+ group_collection = 'Newgroup'
+ user_collection_dir = 'this.file.Newuser'
+ group_collection_dir = 'this.file.Newgroup'
+
+ def __unicode__(self):
+ return self.title
+```
+
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 5b88d63..66e29cb 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -10,7 +10,8 @@ services:
- "8010"
- "3434"
depends_on:
- - fauna
+ fauna:
+ condition: service_healthy
ports:
- 8010:8888
- 3434:3434
@@ -22,12 +23,12 @@ services:
fauna:
restart: always
- image: fauna/faunadb
+ image: fauna/faunadb:4.15.0
ports:
- 8443:8443
- 8444:8444
- 8445:8445
- - 8084:8084
+ - 8085:8085
volumes:
- ./logs:/var/log/faunadb
- ./docker-fauna.yml:/docker-fauna.yml
diff --git a/pfunk/__init__.py b/pfunk/__init__.py
index 8568372..56ad1d9 100644
--- a/pfunk/__init__.py
+++ b/pfunk/__init__.py
@@ -5,8 +5,9 @@
.. include:: ../CONTRIBUTE.md
"""
__docformat__ = "google"
+
+from .client import FaunaClient
from .collection import Collection, Enum
from .fields import (StringField, IntegerField, DateField, DateTimeField, BooleanField, FloatField, EmailField,
EnumField, ReferenceField, ManyToManyField, SlugField)
from .project import Project
-from .client import FaunaClient
diff --git a/pfunk/cli.py b/pfunk/cli.py
index 1261fa0..830f3c7 100644
--- a/pfunk/cli.py
+++ b/pfunk/cli.py
@@ -1,20 +1,22 @@
-import click
import json
import os
import sys
-import datetime
-from jinja2 import TemplateNotFound
+import click
+from envs import env
from valley.utils import import_util
from werkzeug.serving import run_simple
-from pfunk.client import FaunaClient, q
-from pfunk.contrib.auth.collections import Group, PermissionGroup
+from pfunk.client import FaunaClient, q
+from pfunk.contrib.auth.key import PermissionGroup
from pfunk.exceptions import DocNotFound
from pfunk.template import wsgi_template, project_template, collections_templates, key_template
from pfunk.utils.deploy import Deploy
+Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'))
+
+
@click.group()
def pfunk():
pass
@@ -25,6 +27,7 @@ def load_config_file(filename):
config = json.load(f)
return config
+
@pfunk.command()
@click.option('--generate_local_key', prompt=True, help='Specifies whether to generate a local database and key',
default=False)
@@ -32,18 +35,23 @@ def load_config_file(filename):
@click.option('--email', prompt=True, help='Default From Email')
@click.option('--bucket', prompt=True, help='S3 Bucket')
@click.option('--fauna_key', prompt=True, help='Fauna Key')
+@click.option('--host', prompt=True, help='Host')
+@click.option('--description', prompt=True, help='Project Description')
@click.option('--api_type', type=click.Choice(['web', 'rest', 'none']), prompt=True, help='API Type (web, rest, none)')
@click.argument('name')
-def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, generate_local_key: bool):
+def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str, generate_local_key: bool):
"""
Creates a PFunk project
Args:
name: Project name
api_type: API Gateway type (web, rest, none)
+ description: Project Description
+ host: Host
fauna_key: Fauna secret key
bucket: S3 Bucket
email: Default from Email
stage_name: Application stage
+ generate_local_key: Specifies whether to generate a local database and key
Returns:
@@ -55,6 +63,8 @@ def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stag
json.dump({
'name': name,
'api_type': api_type,
+ 'description': description,
+ 'host': host,
'stages': {stage_name: {
'key_module': f'{name}.{stage_name}_keys.KEYS',
'fauna_secret': fauna_key,
@@ -64,21 +74,25 @@ def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stag
}, f, indent=4, sort_keys=True)
open(f'{name}/__init__.py', 'x').close()
with open(f'{name}/wsgi.py', 'x') as f:
- f.write(wsgi_template.render(PFUNK_PROJECT=f'{name}.project.project'))
+ f.write(wsgi_template.render(
+ PFUNK_PROJECT=f'{name}.project.project'))
with open(f'{name}/project.py', 'x') as f:
f.write(project_template.render())
with open(f'{name}/collections.py', 'x') as f:
f.write(collections_templates.render())
if generate_local_key:
- client = FaunaClient(secret='secret')
+ domain = click.prompt('Please enter your local Fauna Docker hostname.', default='fauna')
+ client = FaunaClient(secret='secret', scheme='http')
db_name = f'{name}-local'
client.query(
q.create_database({'name': db_name})
)
key = client.query(
- q.create_key({'database': q.database(db_name), 'role': 'admin'})
+ q.create_key(
+ {'database': q.database(db_name), 'role': 'admin'})
)
- click.secho(f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green')
+ click.secho(
+ f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green')
else:
click.echo('There is already a project file in this directory.')
@@ -108,6 +122,7 @@ def add_stage(stage_name: str, fauna_key: str, filename: str):
else:
click.echo('You have not run the init command yet.')
+
@pfunk.command()
@click.option('--use_reloader', default=True)
@click.option('--use_debugger', default=True)
@@ -133,7 +148,8 @@ def local(hostname: str, port: int, wsgi: str, config_file: str, use_debugger: b
sys.path.insert(0, os.getcwd())
wsgi_path = wsgi or f'{config.get("name")}.wsgi.app'
app = import_util(wsgi_path)
- run_simple(hostname, port, app, use_debugger=use_debugger, use_reloader=use_reloader)
+ run_simple(hostname, port, app, use_debugger=use_debugger,
+ use_reloader=use_reloader)
@pfunk.command()
@@ -158,7 +174,6 @@ def publish(stage_name: str, project_path: str, config_path: str, publish_locall
project_path = f'{config.get("name")}.project.project'
project = import_util(project_path)
if not publish_locally:
-
secret = config['stages'][stage_name]['fauna_secret']
os.environ['FAUNA_SECRET'] = secret
project.publish()
@@ -178,7 +193,7 @@ def seed_keys(stage_name: str, config_path: str):
"""
config = load_config_file(config_path)
- Key = import_util('pfunk.contrib.auth.collections.Key')
+ Key = import_util('pfunk.contrib.auth.key.Key')
keys = Key.create_keys()
name = config.get('name')
keys_path = f'{name}/{stage_name}_keys.py'
@@ -186,6 +201,7 @@ def seed_keys(stage_name: str, config_path: str):
f.write(key_template.render(keys=keys))
return keys_path
+
@pfunk.command()
@click.option('--local_user', help='Specifies whether the user is local.', prompt=True, default=False)
@click.option('--config_path', help='Configuration file path', default='pfunk.json')
@@ -197,7 +213,8 @@ def seed_keys(stage_name: str, config_path: str):
@click.option('--last_name', prompt=True, help='Last Name')
@click.option('--group_slug', prompt=True, help='User Group Slug', default=None)
@click.argument('stage_name')
-def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_name: str, email: str, password: str, username: str,
+def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_name: str, email: str, password: str,
+ username: str,
project_path: str, config_path: str, local_user: bool):
"""
Create an admin user in the project's Fauna user collection.
@@ -242,9 +259,11 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na
project = import_util(project_path)
perm_list = []
for i in project.collections:
- perm_list.append(PermissionGroup(collection=i, permissions=['create', 'write', 'read', 'delete']))
+ perm_list.append(PermissionGroup(collection=i, permissions=[
+ 'create', 'write', 'read', 'delete']))
user.add_permissions(group, perm_list)
+
@pfunk.command()
@click.option('--config_path', help='Configuration file path')
@click.argument('stage_name')
@@ -266,6 +285,28 @@ def deploy(stage_name: str, config_path: str):
return
d.deploy(stage_name)
+
+@pfunk.command()
+@click.option('--config_path', help='Configuration file path', default='pfunk.json')
+@click.option('--yaml_path', help='Dir to create yaml swagger file to', default='')
+def generate_swagger(config_path: str, yaml_path: str):
+ """ Generates the swagger file of the project from a config json file
+
+ Args:
+ config_path (str, optional):
+ dir of the json config file to use
+ yaml_path (str, optional):
+ dir to put the generated swagger file
+
+ Returns:
+
+ """
+ config = load_config_file(config_path)
+ sys.path.insert(0, os.getcwd())
+ project_path = f'{config.get("name")}.project.project'
+ project = import_util(project_path)
+ project.generate_swagger(yaml_dir=yaml_path, config_file=config_path)
+
+
if __name__ == '__main__':
pfunk()
-
diff --git a/pfunk/collection.py b/pfunk/collection.py
index 204aad2..ea20f21 100644
--- a/pfunk/collection.py
+++ b/pfunk/collection.py
@@ -1,5 +1,5 @@
from envs import env
-from faunadb.errors import BadRequest
+from faunadb.errors import BadRequest, Unauthorized, PermissionDenied
from valley.contrib import Schema
from valley.declarative import DeclaredVars, DeclarativeVariablesMetaclass
from valley.properties import BaseProperty, CharProperty, ListProperty
@@ -10,12 +10,14 @@
from pfunk.web.views.json import DetailView, CreateView, UpdateView, DeleteView, ListView
from .client import q
from .contrib.generic import GenericCreate, GenericDelete, GenericUpdate, AllFunction
-from .exceptions import DocNotFound
+from .exceptions import DocNotFound, NotUniqueError
from .queryset import Queryset
from .resources import Index
__all__ = ['Enum', 'Collection']
+from .web.views.html import HTMLCreateView, HTMLUpdateView, HTMLListView, HTMLDeleteView, HTMLDetailView
+
class PFunkDeclaredVars(DeclaredVars):
base_field_class = BaseProperty
@@ -32,6 +34,8 @@ class Enum(Schema):
def __unicode__(self):
return self.name # pragma: no cover
+ def __str__(self):
+ return self.name # pragma: no cover
class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass):
"""
@@ -54,7 +58,11 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass):
use_crud_views: bool = True
"""Specifies whether to use the CRUD views."""
crud_views: list = [CreateView, UpdateView, ListView, DeleteView, DetailView]
- """Specifies the base events used if the `use_base_events` variable is `True`"""
+ """Specifies the crud views used if the `use_crud_views` variable is `True`"""
+ use_crud_html_views = False
+ """Specifies whether to use the CRUD HTML views."""
+ crud_html_views = [HTMLCreateView, HTMLUpdateView, HTMLListView, HTMLDeleteView, HTMLDetailView]
+ """Specifies the crud html views used if the `use_crud_html_views` variable is `True`"""
require_auth: bool = True
"""Determines wheter to require authentication and authorization"""
non_public_fields: list = []
@@ -70,6 +78,19 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass):
'collection_name']
"""List of class variables that are not allowed a field names. """
+ """ Optional in-line definition user and group class """
+ user_collection = None
+ group_collection = None
+ user_collection_dir = None
+ group_collection_dir = None
+
+ def __str__(self):
+ try:
+ return self.__unicode__() # pragma: no cover
+ except (AttributeError, TypeError):
+ return f"{self.__class__.__name__} object" # pragma: no cover
+
+
def __init__(self, _ref: object = None, _lazied: bool = False, **kwargs) -> None:
"""
Args:
@@ -92,6 +113,8 @@ def __init__(self, _ref: object = None, _lazied: bool = False, **kwargs) -> None
self.collection_functions = set(self.collection_functions)
if self.use_crud_views:
self.collection_views.extend(self.crud_views)
+ if self.use_crud_html_views:
+ self.collection_views.extend(self.crud_html_views)
self.collection_views = set(self.collection_views)
if self.use_crud_functions:
@@ -107,6 +130,35 @@ def get_fields(self) -> dict:
"""
return {k: q.select(k, q.var("input")) for k, v in self._base_properties.items() if
k not in self.non_public_fields}
+
+ def get_user_field(self) -> str:
+ """ Acquires the field where the relationship with a user was defined.
+
+ It is required to define the `USER_COLLECTION` in env var if a custom
+ user will be used. This is to ensure the permissions to work properly
+ """
+ fields = self._base_properties.items()
+ user_class = self.user_collection or env('USER_COLLECTION', 'User')
+ user_field = None
+ user_fields = [k for k, v in fields if user_class in v.get_graphql_type()]
+ if user_fields:
+ user_field = user_fields[0]
+ return user_field
+
+ def get_group_field(self) -> str:
+ """ Acquires the field where the relationship with a group was defined.
+
+ It is required to define the `GROUP_COLLECTION` in env var if a custom
+ user will be used. This is to ensure the permissions to work properly
+
+ """
+ fields = self._base_properties.items()
+ group_class = self.group_collection or env('GROUP_COLLECTION', 'Group')
+ group_field = None
+ group_fields = [k for k, v in fields if group_class in v.get_graphql_type()]
+ if group_fields:
+ group_field = group_fields[0]
+ return group_field
def get_collection_name(self) -> str:
"""
@@ -196,7 +248,6 @@ def client(self, _token=None) -> FaunaClient:
Returns: FaunaClient
"""
-
if _token:
return FaunaClient(secret=_token)
return FaunaClient(secret=env('FAUNA_SECRET'))
@@ -376,7 +427,7 @@ def _save_related(self, relational_data, _token=None) -> None:
}
)
)
- except BadRequest:
+ except (BadRequest) as err:
pass
def call_signals(self, name):
@@ -412,11 +463,16 @@ def save(self, _credentials=None, _token=None) -> None:
if not self.ref:
self.call_signals('pre_create_signals')
data_dict, relational_data = self.get_data_dict(_credentials=_credentials)
- resp = self.client(_token=_token).query(
- q.create(
- q.collection(self.get_collection_name()),
- data_dict
- ))
+ try:
+ resp = self.client(_token=_token).query(
+ q.create(
+ q.collection(self.get_collection_name()),
+ data_dict
+ ))
+ except BadRequest as e:
+ if 'instance not unique' in [i.code for i in e.errors]:
+ raise NotUniqueError(f"{self.get_collection_name()} document is not unique.")
+                raise
self.ref = resp['ref']
self.call_signals('post_create_signals')
else:
@@ -569,8 +625,12 @@ def delete_from_id(cls, id: str, _token=None) -> None:
# JSON #
########
- def to_dict(self):
+ def to_dict(self, flat=False):
field_data = self._data.copy()
+ if flat:
+ for k, v in field_data.items():
+ if isinstance(v, Collection):
+ field_data[k] = v.ref.id()
ref = {'id': self.ref.id(), 'collection': self.ref.collection().id()}
obj = {
'ref': ref,
diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py
index 5a1bcb5..5e39ed8 100644
--- a/pfunk/contrib/auth/collections.py
+++ b/pfunk/contrib/auth/collections.py
@@ -1,104 +1,105 @@
-import datetime
-import json
-import random
+from cmath import log
import uuid
+import os
-import jwt
-from cryptography.fernet import Fernet
-from dateutil import tz
from envs import env
-from faunadb.errors import BadRequest, NotFound
-from jwt import ExpiredSignatureError
+from faunadb.errors import BadRequest
from valley.exceptions import ValidationException
from valley.utils import import_util
-from werkzeug.utils import cached_property
+from pfunk import ReferenceField
from pfunk.client import q
from pfunk.collection import Collection, Enum
+from pfunk.resources import Index
+from pfunk.contrib.auth.key import Key
from pfunk.contrib.auth.resources import LoginUser, UpdatePassword, Public, UserRole, LogoutUser
-from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView
+from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, \
+ UpdatePasswordView, ForgotPasswordView
from pfunk.contrib.email.base import send_email
-from pfunk.exceptions import LoginFailed, DocNotFound, Unauthorized
-from pfunk.fields import EmailField, SlugField, ManyToManyField, ListField, ReferenceField, StringField, EnumField
+from pfunk.exceptions import LoginFailed, DocNotFound
+from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField
+from pfunk.fields import ListField
+from pfunk.fields import SlugField
-AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE'])
+class BaseGroup(Collection):
+ """ Group collection that the user belongs to """
+ name = StringField(required=True)
+ slug = SlugField(unique=True, required=False)
+ def __unicode__(self):
+ """Return the name of the group
+ Returns:
+ str: Name of the group
+ """
+ return self.name # pragma: no cover
-class Key(object):
+class UserGroupByUserAndGroupIndex(Index):
+ """Lookup index for UserGroup M2M collection"""
+ name = 'usergroups_by_userID_and_groupID'
+ source = 'Usergroups'
+ terms = [
+ {'field': ['data', 'userID']},
+ {'field': ['data', 'groupID']}
+ ]
+ values = [
+ {'field': ['ref']}
+ ]
- @classmethod
- def create_keys(cls):
- c = cls()
- keys = {}
- for i in range(10):
- kid = str(uuid.uuid4())
- k = {'signature_key': Fernet.generate_key().decode(), 'payload_key': Fernet.generate_key().decode(),
- 'kid': kid}
- keys[kid] = k
- return keys
-
- @classmethod
- def import_keys(cls):
- try:
- keys = import_util(env('KEY_MODULE', 'bad.import'))
- except ImportError:
- keys = {}
- return keys
- @classmethod
- def get_keys(cls):
- keys = cls.import_keys()
- return list(keys.values())
+class BaseUserGroup(Collection):
+ """ Base UserGroup Collection to subclass from when using custom User and Group """
+ collection_indexes = [UserGroupByUserAndGroupIndex]
+ permissions = ListField()
- @classmethod
- def get_key(cls):
+ def __unicode__(self):
+ """Return the userID, groupID, and permissions
- return random.choice(cls.get_keys())
+ Returns:
+ str: userID, groupID, and permissions
+ """
+ return f"{self.userID}, {self.groupID}, {self.permissions}"
- @classmethod
- def create_jwt(cls, secret_claims):
-
- key = cls.get_key()
- pay_f = Fernet(key.get('payload_key'))
- gmt = tz.gettz('GMT')
- now = datetime.datetime.now(tz=gmt)
- exp = now + datetime.timedelta(days=1)
- payload = {
- 'iat': now.timestamp(),
- 'exp': exp.timestamp(),
- 'nbf': now.timestamp(),
- 'iss': env('PROJECT_NAME', 'pfunk'),
- 'til': pay_f.encrypt(json.dumps(secret_claims).encode()).decode()
- }
- return jwt.encode(payload, key.get('signature_key'), algorithm="HS256", headers={'kid': key.get('kid')}), exp
- @classmethod
- def decrypt_jwt(cls, encoded):
- headers = jwt.get_unverified_header(encoded)
- keys = cls.import_keys()
- key = keys.get(headers.get('kid'))
- try:
- decoded = jwt.decode(encoded, key.get('signature_key'), algorithms="HS256", verify=True,
- options={"require": ["iat", "exp", "nbf", 'iss', 'til']})
- except ExpiredSignatureError:
- raise Unauthorized('Unauthorized')
- pay_f = Fernet(key.get('payload_key').encode())
- k = pay_f.decrypt(decoded.get('til').encode())
- return json.loads(k.decode())
+class UserGroups(BaseUserGroup):
+ """ Many-to-many collection of the user-group relationship
+ The native fauna-way of holding many-to-many relationship
+ is to only have the ID of the 2 object. Here in pfunk, we
+ leverage the flexibility of the collection to have another
+ field, which is `permissions`, this field holds the capabilities
+ of a user, allowing us to add easier permission handling.
+ Instead of manually going to roles and adding individual
+ collections which can be painful in long term.
-class Group(Collection):
- """ Group collection that the user belongs to """
- name = StringField(required=True)
- slug = SlugField(unique=True, required=False)
- users = ManyToManyField(
- 'pfunk.contrib.auth.collections.User', relation_name='users_groups')
+ Attributes:
+ collection_name (str):
+ Name of the collection in Fauna
+ userID (str):
+ Fauna ref of user that is tied to the group
+ groupID (str):
+ Fauna ref of a collection that is tied with the user
+ permissions (str[]):
+ List of permissions, `['create', 'read', 'delete', 'write']`
+ """
+ userID = ReferenceField(
+ env('USER_COLLECTION_DIR', 'pfunk.contrib.auth.collections.User'))
+ groupID = ReferenceField(
+ env('GROUP_COLLECTION_DIR', 'pfunk.contrib.auth.collections.Group'))
+ permissions = ListField()
def __unicode__(self):
- return self.name # pragma: no cover
+ """Return the userID, groupID, and permissions
+
+ Returns:
+ str: userID, groupID, and permissions
+ """
+ return f"{self.userID}, {self.groupID}, {self.permissions}"
+
+
+AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE'])
def attach_verification_key(doc):
@@ -123,7 +124,8 @@ class BaseUser(Collection):
non_public_fields = ['groups']
use_email_verification = True
# Views
- collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, ForgotPasswordChangeView]
+ collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView,
+ ForgotPasswordChangeView]
# Signals
pre_create_signals = [attach_verification_key]
post_create_signals = [send_verification_email]
@@ -134,41 +136,66 @@ class BaseUser(Collection):
email = EmailField(required=True, unique=True)
verification_key = StringField(required=False, unique=True)
forgot_password_key = StringField(required=False, unique=True)
- account_status = EnumField(AccountStatus, required=True, default_value="INACTIVE")
+ account_status = EnumField(
+ AccountStatus, required=True, default_value="INACTIVE")
def __unicode__(self):
+ """Returns the username of the user"""
return self.username # pragma: no cover
@classmethod
def login(cls, username, password, _token=None):
""" Logs the user in to Fauna
+ Args:
+ username (str, required): Username of the user
+ password (str, required): Password of the user
+ _token (str, optional): Token of the user
+
Returns:
- token: the token from fauna
+ token (str, required): the token from fauna
"""
c = cls()
try:
return c.client(_token=_token).query(
q.call("login_user", {
- "username": username, "password": password})
+ "username": username, "password": password})
)
- except BadRequest:
+ except Exception as err:
raise LoginFailed(
'The login credentials you entered are incorrect.')
@classmethod
def logout(cls, _token=None):
- """ Expires/invalidates the user's login token """
+ """ Expires/invalidates the user's login token
+
+ Args:
+ _token (str, optional): Token of the user
+
+ Returns:
+ None
+ """
c = cls()
return c.client(_token=_token).query(
q.call("logout_user")
)
def permissions(self, _token=None):
+ """Returns an empty array"""
return []
@classmethod
def api_login(cls, username, password, _token=None):
+ """ Logs the user in to Fauna and creates a JWT
+
+ Args:
+ username (str, required): Username of the user
+ password (str, required): Password of the user
+ _token (str, optional): Token of the user
+
+ Returns:
+ token (str, required): the token from fauna
+ """
token = cls.login(username=username, password=password, _token=_token)
user = cls.get_current_user(_token=token)
claims = user.to_dict().copy()
@@ -183,7 +210,14 @@ def api_login(cls, username, password, _token=None):
@classmethod
def get_from_id(cls, _token=None):
- """ Acquire user from the given Id """
+ """ Acquire user from the given Id
+
+ Args:
+ _token (str, optional): Token of the user
+
+ Returns:
+ user (BaseUser, required): The user object
+ """
c = cls()
ref = c.client(_token=_token).query(
q.current_identity()
@@ -193,10 +227,18 @@ def get_from_id(cls, _token=None):
def attach_verification_key(self):
""" Attaches the verification key to user
to enable one-time activate
+
+ Returns:
+ None
"""
self.verification_key = str(uuid.uuid4())
def attach_forgot_verification_key(self):
+ """ Attaches forgot password key to user
+
+ Returns:
+ None
+ """
self.forgot_password_key = str(uuid.uuid4())
self.save()
@@ -204,32 +246,54 @@ def attach_forgot_verification_key(self):
def verify_email(cls, verification_key, verify_type='signup', password=None):
""" Activate the user from the verification key
- Args:
- verification_key (str, required):
- verification key in the email to compare the one
- attached to the user
- """
+ Args:
+ verification_key (str, required):
+ verification key in the email to compare the one
+ attached to the user
+ verify_type (str, optional):
+ Type of verification being performed. Default: 'signup'
+ password (str, optional):
+ Password of the user. Required if verify_type is 'forgot'
+
+ Returns:
+ None
+ """
if verify_type == 'signup':
- user = cls.get_by('unique_User_verification_key', [verification_key])
+ user = cls.get_by('unique_User_verification_key',
+ [verification_key])
user.verification_key = ''
user.account_status = 'ACTIVE'
user.save()
elif verify_type == 'forgot' and password:
- user = cls.get_by('unique_User_forgot_password_key', [verification_key])
+ user = cls.get_by('unique_User_forgot_password_key', [
+ verification_key])
user.forgot_password_key = ''
user.save(_credentials=password)
def send_verification_email(self, from_email=None, verification_type='signup'):
- """ Send the verification email with the hashed key """
+ """ Send the verification email with the hashed key
+
+ Args:
+ from_email (str, optional):
+ From email address of the verification email.
+ Default: env('DEFAULT_FROM_EMAIL')
+ verification_type (str, optional):
+ Type of verification being performed. Default: 'signup'
+
+ Returns:
+ None
+ """
project_name = env('PROJECT_NAME', '')
if verification_type == 'signup':
txt_template = 'auth/verification_email.txt'
html_template = 'auth/verification_email.html'
verification_key = self.verification_key
+ verification_link = f'https://{env("PROJECT_DOMAIN")}/{self.get_collection_name().lower()}/verify/{verification_key}'
elif verification_type == 'forgot':
txt_template = 'auth/forgot_email.txt'
html_template = 'auth/forgot_email.html'
verification_key = self.forgot_password_key
+ verification_link = f'https://{env("PROJECT_DOMAIN")}/{self.get_collection_name().lower()}/forgot-password/{verification_key}'
try:
send_email(
txt_template=txt_template,
@@ -239,7 +303,10 @@ def send_verification_email(self, from_email=None, verification_type='signup'):
subject=f'{project_name} Email Verification',
first_name=self.first_name,
last_name=self.last_name,
- verification_key=verification_key
+ verification_key=verification_key,
+ verification_type=verification_type,
+ verification_link=verification_link,
+ collection=self.get_collection_name().lower(),
)
except Exception as e:
import logging
@@ -247,11 +314,14 @@ def send_verification_email(self, from_email=None, verification_type='signup'):
@classmethod
def forgot_password(cls, email):
- """ Sends forgot password email to let user
+ """ Sends forgot password email to let user
use that link to reset their password
"""
+ # get the user object
user = cls.get_by('unique_User_email', email)
+ # attach the forgot verification key
user.attach_forgot_verification_key()
+ # send the verification email
user.send_verification_email(verification_type='forgot')
@classmethod
@@ -264,12 +334,15 @@ def signup(cls, _token=None, **kwargs):
**kwargs (dict, required):
The user's needed information for creation
"""
+ # create a data dict with the user's needed information
data = kwargs
data['account_status'] = 'INACTIVE'
+ # pop the group key if it exists
try:
data.pop('groups')
except KeyError:
pass
+ # create the user
cls.create(**data, _token=_token)
@classmethod
@@ -296,15 +369,21 @@ def update_password(cls, current_password, new_password, new_password_confirm, _
If current_password is wrong, will return
`Wrong current password.`
"""
+ # raise an exception if new password and new password confirm don't match
if new_password != new_password_confirm:
- raise ValidationException('new_password: Password field and password confirm field do not match.')
+ raise ValidationException(
+ 'new_password: Password field and password confirm field do not match.')
+ # create a collection instance
c = cls()
+ # update the password using the user's current password and the new password
try:
return c.client(_token=_token).query(
- q.call("update_password", {'current_password': current_password, 'new_password': new_password})
+ q.call("update_password", {
+ 'current_password': current_password, 'new_password': new_password})
)
except BadRequest:
- raise ValidationException('current_password: Password update failed.')
+ raise ValidationException(
+ 'current_password: Password update failed.')
@classmethod
def get_current_user(cls, _token=None):
@@ -316,81 +395,50 @@ def get_current_user(cls, _token=None):
id (str):
Fauna ID of the user in `User` collection
"""
+ # create a collection instance
c = cls()
+ # get the current identity
return cls.get(c.client(_token=_token).query(q.current_identity()).id())
- def __unicode__(self):
- return self.username # pragma: no cover
-
-
-class UserGroups(Collection):
- """ Many-to-many collection of the user-group relationship
-
- The native fauna-way of holding many-to-many relationship
- is to only have the ID of the 2 object. Here in pfunk, we
- leverage the flexibility of the collection to have another
- field, which is `permissions`, this field holds the capablities
- of a user, allowing us to add easier permission handling.
- Instead of manually going to roles and adding individual
- collections which can be painful in long term.
-
- Attributes:
- collection_name (str):
- Name of the collection in Fauna
- userID (str):
- Fauna ref of user that is tied to the group
- groupID (str):
- Fauna ref of a collection that is tied with the user
- permissions (str[]):
- List of permissions, `['create', 'read', 'delete', 'write']`
- """
- collection_name = 'users_groups'
- userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User'))
- groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'))
- permissions = ListField()
-
- def __unicode__(self):
- return f"{self.userID}, {self.groupID}, {self.permissions}"
-
-class PermissionGroup(object):
- """ List of permission that a user/object has
-
- Attributes:
- collection (`pfunk.collection.Collection`, required):
- Collection to allow permissions
- permission (list, required):
- What operations should be allowed `['create', 'read', 'delete', 'write']`
+class ExtendedUser(BaseUser):
+ """ User that has permission capabilities. Extension of `BaseUser`.
+ Subclass and define the properties needed.
"""
- valid_actions: list = ['create', 'read', 'delete', 'write']
-
- def __init__(self, collection: Collection, permissions: list):
- if not issubclass(collection, Collection):
- raise ValueError(
- 'Permission class requires a Collection class as the first argument.')
- self.collection = collection
- self._permissions = permissions
- self.collection_name = self.collection.get_class_name()
-
- @cached_property
- def permissions(self):
- """ Lists all collections and its given permissions """
- return [f'{self.collection_name}-{i}'.lower() for i in self._permissions if i in self.valid_actions]
-
-
-class User(BaseUser):
- """ User that has permission capabilities. Extension of `BaseUser` """
- groups = ManyToManyField(Group, 'users_groups')
+ user_group_class = None
+ group_class = None
@classmethod
def get_permissions(cls, ref, _token=None):
+ """Returns the permissions of the user
+
+ Args:
+ ref (str): The user ID
+ _token (str): Fauna auth token
+
+ Returns:
+ str[]: Permissions of the user in list:
+ `['create', 'read', 'delete', 'write']`
+ """
return cls.get(ref, _token).permissions(_token=_token)
def get_groups(self, _token=None):
""" Returns the groups (collections) that the user is bound with """
- return [Group.get(i.id(), _token=_token) for i in self.client(_token=_token).query(
- q.paginate(q.match('users_groups_by_user', self.ref))
- ).get('data')]
+ if not self.group_class or not self.user_group_class:
+ raise NotImplementedError
+ group_class_field = self.get_group_field()
+ user_class = self.__class__.__name__.lower()
+ group_class = self.group_class.__name__.lower()
+ relation_name = self._base_properties.get(group_class_field).relation_name
+ index_name = f'{user_class}s_{group_class}s_by_{user_class}'
+ if relation_name:
+ index_name = f'{relation_name}_by_{user_class}'
+ # query Fauna to get the data
+ data = self.client(_token=_token).query(
+ q.paginate(q.match(index_name, self.ref))
+ ).get('data')
+ # create a list of group instances from the data
+ return [self.group_class.get(i.id(), _token=_token) for i in data]
def permissions(self, _token=None):
""" Returns the permissions of the user
@@ -406,48 +454,84 @@ def permissions(self, _token=None):
perm_list (str[]):
Permissions of the user in list: `['create', 'read', 'delete', 'write']`
"""
+ index_name = 'usergroups_by_userID_and_groupID'
perm_list = []
+ # loop over the groups of the user
for i in self.get_groups(_token=_token):
- ug = UserGroups.get_index('users_groups_by_group_and_user', [
- i.ref, self.ref], _token=_token)
+ # query Fauna to get the UserGroup instance of the user
+ ug = self.user_group_class.get_index(index_name, [
+ self.ref, i.ref], _token=_token)
+ # loop over the UserGroup instances
for user_group in ug:
p = []
+ # check if there are any permissions in the instance
if isinstance(user_group.permissions, list):
+ # loop over the permissions
p = [
f'{user_group.groupID.slug}-{i}' for i in user_group.permissions]
+ # add the permissions to the list
perm_list.extend(p)
+ # return a list of the user's permissions
return perm_list
def add_permissions(self, group, permissions: list, _token=None):
- """ Adds permission for the user
-
- Adds permission by extending the list of permission
- in the many-to-many collection of the user, i.e. in
+ """ Adds permission for the user
+
+ Adds permission by extending the list of permission
+ in the many-to-many collection of the user, i.e. in
the `UserGroup` collection.
Args:
- group (str, required):
+ group (str, required):
Group collection of the User
permissions (list, required):
Permissions to give, `['create', 'read', 'delete', 'write']`
Just add the operation you need
_token (str, required):
auth token of the user
-
+
Returns:
UserGroup (`contrib.auth.collections.UserGroup`):
- `UserGroup` instance which has the added permissions
+ `UserGroup` instance which has the added permissions
of the user
"""
perm_list = []
+ index_name = 'usergroups_by_userID_and_groupID'
+
+ # loop over the permissions and add to the list
for i in permissions:
perm_list.extend(i.permissions)
+ # raise an error if the user_group_class is not defined
+ if not self.user_group_class:
+ raise NotImplementedError
+ # try to get the UserGroup instance
try:
- user_group = UserGroups.get_by('users_groups_by_group_and_user', terms=[group.ref, self.ref])
+ user_group = self.user_group_class.get_by(
+ index_name, terms=[self.ref, group.ref])
+ # create a new instance if not found
except DocNotFound:
- user_group = UserGroups.create(userID=self.ref, groupID=group.ref, permissions=perm_list)
+ user_group = self.user_group_class.create(
+ userID=self.ref, groupID=group.ref, permissions=perm_list, _token=_token)
+ # update the permissions if they're not the same
if user_group.permissions != perm_list:
user_group.permissions = perm_list
+ # save the changes
user_group.save()
+
+ # return the UserGroup instance
return user_group
+
+
+class Group(BaseGroup):
+ """ A default group that already has predefined M2M relationship with `pfunk.contrib.auth.collections.User` """
+ users = ManyToManyField(
+ 'pfunk.contrib.auth.collections.User', 'users_groups')
+
+
+class User(ExtendedUser):
+ """ A default user that already has predefined M2M relationship with `pfunk.contrib.auth.collections.Group` """
+ user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups')
+ group_class = import_util('pfunk.contrib.auth.collections.Group')
+ groups = ManyToManyField(
+ 'pfunk.contrib.auth.collections.Group', 'users_groups')
diff --git a/pfunk/contrib/auth/key.py b/pfunk/contrib/auth/key.py
new file mode 100644
index 0000000..bea8951
--- /dev/null
+++ b/pfunk/contrib/auth/key.py
@@ -0,0 +1,103 @@
+import datetime
+import json
+import random
+import uuid
+
+import jwt
+from cryptography.fernet import Fernet
+from dateutil import tz
+from envs import env
+from jwt import ExpiredSignatureError
+from valley.utils import import_util
+from werkzeug.utils import cached_property
+
+from pfunk import Collection
+from pfunk.exceptions import Unauthorized
+
+
+class Key(object):
+
+ @classmethod
+ def create_keys(cls):
+ c = cls()
+ keys = {}
+ for i in range(10):
+ kid = str(uuid.uuid4())
+ k = {'signature_key': Fernet.generate_key().decode(), 'payload_key': Fernet.generate_key().decode(),
+ 'kid': kid}
+ keys[kid] = k
+ return keys
+
+ @classmethod
+ def import_keys(cls):
+ try:
+ keys = import_util(env('KEY_MODULE', 'bad.import'))
+ except ImportError:
+ keys = {}
+ return keys
+
+ @classmethod
+ def get_keys(cls):
+ keys = cls.import_keys()
+ return list(keys.values())
+
+ @classmethod
+ def get_key(cls):
+ keys = cls.get_keys()
+ return random.choice(keys)
+
+ @classmethod
+ def create_jwt(cls, secret_claims):
+
+ key = cls.get_key()
+ pay_f = Fernet(key.get('payload_key'))
+ gmt = tz.gettz('GMT')
+ now = datetime.datetime.now(tz=gmt)
+ exp = now + datetime.timedelta(days=1)
+ payload = {
+ 'iat': int(now.timestamp()),
+ 'exp': exp.timestamp(),
+ 'nbf': now.timestamp(),
+ 'iss': env('PROJECT_NAME', 'pfunk'),
+ 'til': pay_f.encrypt(json.dumps(secret_claims).encode()).decode()
+ }
+ return jwt.encode(payload, key.get('signature_key'), algorithm="HS256", headers={'kid': key.get('kid')}), exp
+
+ @classmethod
+ def decrypt_jwt(cls, encoded):
+ headers = jwt.get_unverified_header(encoded)
+ keys = cls.import_keys()
+ key = keys.get(headers.get('kid'))
+ try:
+ decoded = jwt.decode(encoded, key.get('signature_key'), algorithms=["HS256"], verify=True,
+ options={"require": ["iat", "exp", "nbf", 'iss', 'til']})
+ except ExpiredSignatureError:
+ raise Unauthorized('Unauthorized')
+ pay_f = Fernet(key.get('payload_key').encode())
+ k = pay_f.decrypt(decoded.get('til').encode())
+ return json.loads(k.decode())
+
+
+class PermissionGroup(object):
+ """ List of permission that a user/object has
+
+ Attributes:
+ collection (`pfunk.collection.Collection`, required):
+ Collection to allow permissions
+ permission (list, required):
+ What operations should be allowed `['create', 'read', 'delete', 'write']`
+ """
+ valid_actions: list = ['create', 'read', 'delete', 'write']
+
+ def __init__(self, collection: Collection, permissions: list):
+ if not issubclass(collection, Collection):
+ raise ValueError(
+ 'Permission class requires a Collection class as the first argument.')
+ self.collection = collection
+ self._permissions = permissions
+ self.collection_name = self.collection.get_class_name()
+
+ @cached_property
+ def permissions(self):
+ """ Lists all collections and its given permissions """
+ return [f'{self.collection_name}-{i}'.lower() for i in self._permissions if i in self.valid_actions]
diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py
index 40a560f..52ba448 100644
--- a/pfunk/contrib/auth/resources.py
+++ b/pfunk/contrib/auth/resources.py
@@ -1,5 +1,13 @@
+from valley.utils import import_util
+from tokenize import group
+from envs import env
+
from pfunk.client import q
-from pfunk.resources import Function, Role, Index
+from pfunk.resources import Function, Role
+
+# Global collections
+# USER_CLASS = env('USER_COLLECTION', 'User')
+# GROUP_CLASS = env('GROUP_COLLECTION', 'Group')
class AuthFunction(Function):
@@ -13,7 +21,7 @@ def get_body(self):
return q.query(
q.lambda_(["input"],
q.let({
- "user": q.match(q.index("unique_User_username"), q.select("username", q.var("input")))
+ "user": q.match(q.index(f"unique_{self.collection.__class__.__name__}_username"), q.select("username", q.var("input")))
},
q.if_(
q.equals(
@@ -34,10 +42,11 @@ def get_body(self):
}
)
),
- q.abort("Account is not active. Please check email for activation.")
- )
- )
+ q.abort(
+ "Account is not active. Please check email for activation.")
)
+ )
+ )
)
@@ -61,9 +70,9 @@ def get_body(self):
"credentials": {"password": q.select("new_password", q.var("input"))}
}),
q.abort("Wrong current password.")
- )
- )
- )
+ )
+ )
+ )
class CreateUser(AuthFunction):
@@ -71,7 +80,8 @@ def get_body(self):
data_dict = {
"data": self.collection.get_fields(),
"credentials": {
- self.collection._credential_field: q.select(self.collection._credential_field, q.var("input"))
+ self.collection._credential_field: q.select(
+ self.collection._credential_field, q.var("input"))
}
}
return q.query(
@@ -79,7 +89,8 @@ def get_body(self):
q.let(
{
'result': q.create(
- q.collection(self.collection.get_collection_name()),
+ q.collection(
+ self.collection.get_collection_name()),
data_dict),
'input': q.var('input')
},
@@ -90,20 +101,20 @@ def get_body(self):
q.lambda_(
'group',
q.create(
- q.collection(self.collection._base_properties.get('groups').relation_name),
+ q.collection(self.collection._base_properties.get(
+ 'groups').relation_name),
{'data': {
'userID': q.select('ref', q.var('result')),
'groupID': q.var('group')
}}
)
- )
- ,
+ ),
q.select('groups', q.var('input'))
),
q.abort('Groups not defined.')
)
- )
- ))
+ )
+ ))
class Public(Role):
@@ -116,11 +127,11 @@ def get_function_lambda(self):
q.lambda_(['data'],
q.equals(
q.select('account_status', q.select('data',
- q.match(q.index('unique_User_username',
+ q.match(q.index(f'unique_{self.collection.__class__.__name__}_username',
q.select('username', q.var('data')))))),
"ACTIVE"
- )
- ))
+ )
+ ))
def get_privileges(self):
return [
@@ -170,15 +181,34 @@ def get_lambda(self, resource_type):
)
+
class GenericAuthorizationRole(Role):
- user_table = 'User'
- current_user_field = 'user'
- name_suffix = 'user_based_crud_role'
+ """This class provides generic authorization roles for collections"""
- def get_name(self):
- return self.name or f"{self.collection.get_class_name()}_{self.name_suffix}"
+ def get_relation_index_name(self) -> str:
+ """
+ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID'
+ """
+ return 'usergroups_by_userID_and_groupID'
- def get_privileges(self):
+ def get_user_table(self) -> str:
+ """Returns the user table name"""
+ return self.collection.user_collection or env('USER_COLLECTION', 'User')
+
+ def get_group_table(self) -> str:
+ """Returns the group table name"""
+ return self.collection.group_collection or env('GROUP_COLLECTION', 'Group')
+
+ def get_name_suffix(self) -> str:
+ """Returns the name suffix for this role"""
+ return f'{self.collection.get_user_field().lower()}_based_crud_role'
+
+ def get_name(self) -> str:
+ """Returns the name for this role"""
+ return self.name or f"{self.collection.get_class_name()}_{self.get_name_suffix()}"
+
+ def get_privileges(self) -> list:
+ """Returns the list of privileges for this role"""
priv_list = [
{
"resource": q.collection(self.collection.get_collection_name()),
@@ -190,7 +220,7 @@ def get_privileges(self):
}
},
{
- "resource": q.index(self.relation_index_name),
+ "resource": q.index(self.get_relation_index_name()),
"actions": {
"read": True
}
@@ -220,66 +250,123 @@ def get_privileges(self):
}
for i in self.collection.collection_functions
])
+
return priv_list
class GenericUserBasedRole(GenericAuthorizationRole):
- relation_index_name = 'users_groups_by_user'
+ """Class to provide a generic set of permissions based on the user-entity relationship.
+
+ Args:
+ GenericAuthorizationRole (class): Inherited class
+ """
+
+ def get_relation_index_name(self):
+ """Returns the user-group by user index name
+
+ Formatted as: {user_group_relation_name}_by_{user_class}
+
+ Returns:
+ str: User-group by user index name
+ """
+ # Acquires the `groups` field from the user collection
+ user_field = self.collection.get_user_field()
+ if user_field:
+ user_field = user_field.lower()
+ else:
+ return None
+ user_col = self.collection._base_properties.get(user_field)
+ user_col = user_col.get_foreign_class()
+ user_groups = user_col._base_properties.get("groups")
+
+ if user_groups:
+ relation_index_name = (user_groups.relation_name
+ + '_by_'
+ + self.get_user_table().lower())
+ return relation_index_name
+ return None
def get_lambda(self, resource_type):
+ """ Returns the lambda for the specified resource type
+
+ Args:
+ resource_type (str): Type of resource
+
+ Returns:
+ q.query: Lambda query
+ """
+ current_user_field = self.collection.get_user_field()
if resource_type == 'write':
lambda_args = ["old_object", "new_object", "object_ref"]
- user_ref = q.select(self.current_user_field,
- q.select('data', q.var('old_object')))
+ user_ref = q.select(
+ current_user_field, q.select('data', q.var('old_object')))
return q.query(
- q.lambda_(lambda_args,
- q.and_(
- q.equals(
- user_ref,
- q.current_identity()
- ),
- q.equals(
- q.select(self.current_user_field, q.select('data', q.var('new_object'))),
- q.current_identity()
- )
- )
-
- )
+ q.lambda_(
+ lambda_args,
+ q.and_(
+ q.equals(user_ref, q.current_identity()),
+ q.equals(
+ q.select(current_user_field, q.select(
+ 'data', q.var('new_object'))),
+ q.current_identity()
+ )
+ )
+ )
)
elif resource_type == 'create':
lambda_args = ["new_object"]
- user_ref = q.select(self.current_user_field,
- q.select('data', q.var('new_object')))
+ user_ref = q.select(
+ current_user_field, q.select('data', q.var('new_object')))
elif resource_type == 'read' or resource_type == 'delete':
lambda_args = ["object_ref"]
- user_ref = q.select(self.current_user_field,
- q.select('data', q.get(q.var('object_ref'))))
+ user_ref = q.select(
+ current_user_field, q.select('data', q.get(q.var('object_ref'))))
return q.query(
- q.lambda_(lambda_args,
- q.equals(
- user_ref,
- q.current_identity()
- )
- )
+ q.lambda_(lambda_args, q.equals(user_ref, q.current_identity()))
)
-
class GenericGroupBasedRole(GenericAuthorizationRole):
- relation_index_name = 'users_groups_by_group_and_user'
- through_user_field = 'userID'
- current_group_field = 'group'
+ """Class for giving permissions to Group-based entities
+ """
+ # Initialize the `permissions_field` variable
permissions_field = 'permissions'
- user_table = 'User'
- name_suffix = 'group_based_crud_role'
+ def get_name_suffix(self):
+ """Get the name suffix for the group-based role
+
+ Returns:
+ str: The name suffix for the group-based role
+ """
+ return f'{self.get_group_table().lower()}_based_crud_role'
+
def get_lambda(self, resource_type):
+ """Returns the lambda function for giving the permission to Group-based entities
+
+ Args:
+ resource_type (str): The type of operation (create, read, write, and delete)
+
+ Returns:
+ Lambda: The lambda function for giving the permission to Group-based entities
+ """
+ current_group_field = self.collection.get_group_field().lower()
perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower()
+
+ # Initialize the lambda arguments based on the `resource_type`
if resource_type == 'write':
- group_ref = q.select(self.current_group_field,
+ group_ref = q.select(current_group_field,
q.select('data', q.var('old_object')))
lambda_args = ["old_object", "new_object", "object_ref"]
+ elif resource_type == 'create':
+ lambda_args = ["new_object"]
+ group_ref = q.select(current_group_field,
+ q.select('data', q.var('new_object')))
+ elif resource_type == 'read' or resource_type == 'delete':
+ lambda_args = ["object_ref"]
+ group_ref = q.select(current_group_field,
+ q.select('data', q.get(q.var('object_ref'))))
+ if resource_type == 'write':
return q.query(
q.lambda_(lambda_args,
q.and_(
@@ -287,44 +374,166 @@ def get_lambda(self, resource_type):
# User ID from index
q.select(0, q.filter_(lambda i: q.equals(perm, i),
q.select(self.permissions_field,
+ q.select("data",
q.get(
q.match(
- q.index(self.relation_index_name),
- group_ref,
- q.current_identity()
+ q.index(
+ self.get_relation_index_name()),
+ q.current_identity(),
+ group_ref
)
- )))),
+ ))))),
perm
),
q.equals(
- q.select(self.current_group_field, q.select('data', q.var('old_object'))),
- q.select(self.current_group_field, q.select('data', q.var('new_object'))),
+ q.select(current_group_field, q.select(
+ 'data', q.var('old_object'))),
+ q.select(current_group_field, q.select(
+ 'data', q.var('new_object'))),
)
- )
- )
+ )
+ )
+ )
+ else:
+ # Return the lambda function for giving the permission to Group-based entities
+ return q.query(
+ q.lambda_(
+ lambda_args,
+ q.equals(
+ # NOTE: After acquiring the instance of `UserGroup`, filter the result: permission field
+                    # that matches the `perm` variable AND then see if it equals the `perm` var
+ # IMPORTANT: by using this, it will easily filter permissions available, and if there were none, then it is automatically false
+ q.select(0, q.filter_(lambda i: q.equals(perm, i),
+ q.select(self.permissions_field,
+ q.select("data",
+ q.get(q.match(
+ q.index(
+ self.get_relation_index_name()),
+ q.current_identity(),
+ group_ref
+ )))))),
+ perm
+ )
+ )
+ )
+
+
+class GenericUserBasedRoleM2M(GenericAuthorizationRole):
+ """ Generic set of permissions for many-to-many entity to user relationship """
+
+ def get_privileges(self):
+ """
+ Usage of parent `get_privileges()` with addition of access to M2M collection
+ Returns:
+ List: list of privileges
+ """
+ priv_list = super().get_privileges()
+ fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField')
+ for field, value in fields.items():
+ # Get foreign column
+ foreign_col = self.collection._base_properties.get(field)
+ relation_name = foreign_col.relation_name
+ if relation_name:
+ priv_list.extend([
+ {
+ "resource": q.collection(relation_name),
+ "actions": {
+ 'read': True,
+ 'create': True,
+ 'update': False,
+ 'delete': False
+ }
+ }
+ ])
+ return priv_list
+
+ def get_name_suffix(self):
+ """
+ Returns:
+ String: suffix for name of the role
+ """
+ return f'{self.collection.get_user_field().lower()}_based_crud_role'
+
+ def get_relation_index_name(self):
+ """
+ Returns the index name of the m2m index of an entity and user e.g. 'users_blogs_by_blog_and_newuser'
+ Returns:
+ String: name of the index
+ """
+ user_field = self.collection.get_user_field()
+ if user_field:
+ user_field = user_field.lower()
+ else:
+ return None
+ user_col = self.collection._base_properties.get(user_field)
+ user_col_relation = user_col.relation_name
+
+ group_table = self.get_group_table().lower()
+ if group_table:
+ relation_index_name = (user_col_relation
+ + '_by_'
+ + self.collection.get_collection_name().lower()
+ + '_and_'
+ + self.get_user_table().lower())
+ return relation_index_name
+ return None
+
+ def get_lambda(self, resource_type):
+ """
+        Returns lambda expression for the given resource type
+ Args:
+ resource_type (String): type of resource
+ Returns:
+            Lambda expression
+ """
+ current_user_field = self.collection.get_user_field()
+ if resource_type == 'write':
+ lambda_args = ["old_object", "new_object", "object_ref"]
+ obj_ref = q.var('old_object')
+ return q.query(
+ q.lambda_(lambda_args,
+ q.and_(
+ q.equals(
+ q.select(f'{self.get_user_table().lower()}ID',
+ q.select("data",
+ q.get(q.match(
+ q.index(
+ self.get_relation_index_name()),
+ obj_ref,
+ q.current_identity()
+ )))
+ ),
+ q.current_identity()
+ ),
+ q.equals(
+ q.select(current_user_field, q.select(
+ 'data', q.var('new_object'))),
+ q.current_identity()
+ )
+ )
+ )
)
elif resource_type == 'create':
- group_ref = q.select(self.current_group_field,
- q.select('data', q.var('new_object')))
- lambda_args = ["new_object"]
+ # Create ops will always be allowed
+ return True
elif resource_type == 'read' or resource_type == 'delete':
- group_ref = q.select(self.current_group_field,
- q.select('data', q.get(q.var('object_ref'))))
lambda_args = ["object_ref"]
+ obj_ref = q.var('object_ref')
return q.query(
q.lambda_(
lambda_args,
q.equals(
- q.select(0, q.filter_(lambda i: q.equals(perm, i),
- q.select(self.permissions_field,
- q.select("data",
- q.get(q.match(
- q.index(self.relation_index_name),
- group_ref,
- q.current_identity()
- )))))),
- perm
+ q.select(f'{self.get_user_table().lower()}ID',
+ q.select("data",
+ q.get(q.match(
+ q.index(
+ self.get_relation_index_name()),
+ obj_ref,
+ q.current_identity()
+ )))
+ ),
+ q.current_identity()
)
)
- )
+ )
\ No newline at end of file
diff --git a/pfunk/contrib/auth/templates/auth/forgot_email.html b/pfunk/contrib/auth/templates/auth/forgot_email.html
new file mode 100644
index 0000000..9a55777
--- /dev/null
+++ b/pfunk/contrib/auth/templates/auth/forgot_email.html
@@ -0,0 +1,13 @@
+
+
+
Email Verification
+
First Name
+
{{first_name}}
+
Last Name
+
{{last_name}}
+
Email
+
{{email}}
+
Verification Link
+
{{verification_link}}
+
+
\ No newline at end of file
diff --git a/pfunk/contrib/auth/templates/auth/forgot_email.txt b/pfunk/contrib/auth/templates/auth/forgot_email.txt
new file mode 100644
index 0000000..e69de29
diff --git a/pfunk/contrib/auth/views.py b/pfunk/contrib/auth/views.py
index fbfea54..495443b 100644
--- a/pfunk/contrib/auth/views.py
+++ b/pfunk/contrib/auth/views.py
@@ -1,7 +1,6 @@
from abc import ABC
from envs import env
-from werkzeug.http import http_date
from werkzeug.routing import Rule
from pfunk.web.views.base import ActionMixin
@@ -39,6 +38,25 @@ def get_query(self):
'exp': exp
}
+ def _payload_docs(self):
+ return {"data": [
+ {
+ "name": "username",
+ "in": "formData",
+ "description": "Username of the user",
+ "required": True,
+ "type": "string"
+ },
+ {
+ "name": "password",
+ "in": "formData",
+ "description": "Password of the user",
+ "required": True,
+ "type":"string",
+ "format": "password"
+ }
+ ]}
+
class LogoutView(ActionMixin, JSONAuthView):
""" Creates a logout view to enable logout via endpoint
@@ -65,6 +83,24 @@ class SignUpView(ActionMixin, JSONAuthView):
def get_query(self):
return self.collection.signup(**self.get_query_kwargs())
+ def _payload_docs(self):
+ return {"data": [
+ {
+ "name": "username",
+ "in": "formData",
+ "description": "username of the user",
+ "required": True,
+ "type": "string"
+ },
+ {
+ "name": "password",
+ "in": "formData",
+ "description": "password of the user",
+ "required": True,
+ "type":"string",
+ "format": "password"
+ }
+ ]}
class VerifyEmailView(ActionMixin, JSONAuthView):
""" Creates a view that enables verification of a user
@@ -98,6 +134,33 @@ def get_query(self):
self.collection.update_password(kwargs['current_password'], kwargs['new_password'],
kwargs['new_password_confirm'], _token=self.request.token)
+ def _payload_docs(self):
+ return {"data": [
+ {
+ "name": "current_password",
+ "in": "formData",
+ "description": "current password of the user",
+ "required": True,
+ "type": "string",
+ "format": "password"
+ },
+ {
+ "name": "new_password",
+ "in": "formData",
+ "description": "new password of the user",
+ "required": True,
+ "type":"string",
+ "format": "password"
+ },
+ {
+ "name": "new_password_confirm",
+ "in": "formData",
+ "description": "confirm the new password of the user by entering the same string",
+ "required": True,
+ "type":"string",
+ "format": "password"
+ }
+ ]}
class ForgotPasswordView(ActionMixin, JSONAuthView):
""" Create a view to allow call of forgot password func """
@@ -107,10 +170,24 @@ class ForgotPasswordView(ActionMixin, JSONAuthView):
def get_query(self):
return self.collection.forgot_password(**self.get_query_kwargs())
+
+ def _payload_docs(self):
+ return {"data": [
+ {
+ "name": "email",
+ "in": "formData",
+ "description": "email of the user",
+ "required": True,
+ "type": "string"
+ }
+ ]}
class ForgotPasswordChangeView(ActionMixin, JSONAuthView):
- """ Accepts a hashed key from the forgot-password email, validates it if it matches the user's and change the password """
+ """
+ Accepts a hashed key from the forgot-password email, validates
+ it if it matches the user's and change the password
+ """
action = 'forgot-password'
login_required = False
http_methods = ['put']
@@ -118,10 +195,20 @@ class ForgotPasswordChangeView(ActionMixin, JSONAuthView):
def get_query(self):
kwargs = self.get_query_kwargs()
return self.collection.verify_email(
- str(kwargs['verification_key']),
- verify_type='forgot',
+ str(kwargs['verification_key']),
+ verify_type='forgot',
password=kwargs['password'])
+ def _payload_docs(self):
+ return {"data": [
+ {
+ "name": "verification_key",
+ "in": "formData",
+ "description": "hashed key for verification of forgot password event",
+ "required": True,
+ "type": "string"
+ }
+ ]}
class WebhookView(JSONView):
pass
diff --git a/pfunk/contrib/ecommerce/collections.py b/pfunk/contrib/ecommerce/collections.py
index 569d58e..ddc415e 100644
--- a/pfunk/contrib/ecommerce/collections.py
+++ b/pfunk/contrib/ecommerce/collections.py
@@ -1,19 +1,21 @@
import stripe
from envs import env
+from valley.utils import import_util
from pfunk.collection import Collection
-from pfunk.contrib.auth.collections import User, Group
+from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole
+from pfunk.contrib.ecommerce.views import ListStripePackage, DetailStripePackage, CheckoutSuccessView, BaseWebhookView
from pfunk.exceptions import DocNotFound
-from pfunk.fields import EmailField, SlugField, ManyToManyField, ListField, ReferenceField, StringField, EnumField, FloatField
-from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, Public, UserRole
-from pfunk.contrib.ecommerce.resources import StripePublic
-from pfunk.contrib.ecommerce.views import ListStripePackage, DetailStripePackage
+from pfunk.fields import ReferenceField, StringField, FloatField
from pfunk.web.views.json import CreateView, UpdateView, DeleteView
-
stripe.api_key = env('STRIPE_API_KEY')
+User = import_util(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User'))
+Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'))
+
+
class StripePackage(Collection):
""" Collection that has the essential info about a stripe package
@@ -22,11 +24,15 @@ class StripePackage(Collection):
fields and functions to match your system.
Read and detail views are naturally public. Write operations
- requires authentication from admin group.
+ requires authentication from admin group. While it grealty
+ depends on your app, it is recommended to have this only
+ modified by the admins and use `StripeCustomer` model to
+ attach a `stripe_id` to a model that is bound for payment.
"""
use_crud_views = False
collection_roles = [GenericGroupBasedRole]
- collection_views = [ListStripePackage, DetailStripePackage, CreateView, UpdateView, DeleteView]
+ collection_views = [ListStripePackage, DetailStripePackage,
+ CheckoutSuccessView, CreateView, UpdateView, DeleteView]
stripe_id = StringField(required=True)
name = StringField(required=True)
price = FloatField(required=True)
@@ -38,7 +44,7 @@ def __unicode__(self):
@property
def stripe_price(self):
- return int(self.price*100)
+ return int(self.price * 100)
class StripeCustomer(Collection):
@@ -49,9 +55,9 @@ class StripeCustomer(Collection):
fields and functions to match your system.
"""
collection_roles = [GenericUserBasedRole]
+ collection_views = [BaseWebhookView]
user = ReferenceField(User)
- customer_id = StringField(required=True)
- package = ReferenceField(StripePackage)
+ stripe_id = StringField(required=True, unique=True)
def __unicode__(self):
return self.customer_id
diff --git a/pfunk/contrib/ecommerce/resources.py b/pfunk/contrib/ecommerce/resources.py
index da8460a..ebd0729 100644
--- a/pfunk/contrib/ecommerce/resources.py
+++ b/pfunk/contrib/ecommerce/resources.py
@@ -1,5 +1,5 @@
from pfunk.client import q
-from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, Public, UserRole
+from pfunk.contrib.auth.resources import Public
class StripePublic(Public):
diff --git a/pfunk/contrib/ecommerce/views.py b/pfunk/contrib/ecommerce/views.py
index e48813e..fb7f23d 100644
--- a/pfunk/contrib/ecommerce/views.py
+++ b/pfunk/contrib/ecommerce/views.py
@@ -1,19 +1,18 @@
import collections
import json
+from json import JSONDecodeError
+
+import bleach
import requests
import stripe
-import bleach
from envs import env
-from datetime import datetime
-from json import JSONDecodeError
from jinja2 import Environment, BaseLoader
+from werkzeug.routing import Rule
-from pfunk.contrib.email import ses
-from pfunk.exceptions import DocNotFound
-from pfunk.web.views.json import JSONView, ListView, DetailView, CreateView
from pfunk.contrib.email.ses import SESBackend
-from pfunk.contrib.auth.collections import Group, User
+from pfunk.exceptions import DocNotFound
from pfunk.web.views.base import ActionMixin
+from pfunk.web.views.json import ListView, DetailView, CreateView
stripe.api_key = env('STRIPE_API_KEY')
STRIPE_PUBLISHABLE_KEY = env('STRIPE_PUBLISHABLE_KEY')
@@ -41,10 +40,10 @@ class CheckoutView(DetailView):
a base class.
"""
- def get_context_data(self, **kwargs):
+ def get_context(self, **kwargs):
context = super().get_context_data(**kwargs)
customer = self.collection.objects.get_or_create_customer(
- self.request.user) # `StripeCustomer` collection
+ self.request.user) # `StripeCustomer` collection
session = stripe.checkout.Session.create(
payment_method_types=['card'],
customer=customer.customer_id,
@@ -71,30 +70,51 @@ def get_context_data(self, **kwargs):
return context
-class CheckoutSuccessView(DetailView):
+class CheckoutSuccessView(DetailView, ActionMixin):
""" Defines action from the result of `CheckoutView` """
+ action = 'checkout-success'
+ http_methods = ['get']
+
+ @classmethod
+ def url(cls, collection):
+ return Rule(f'/json/{collection.get_class_name()}/{cls.action}/', endpoint=cls.as_view(collection),
+ methods=cls.http_methods)
- def get_object(self, queryset=None):
+ def get_query(self, *args, **kwargs):
""" Acquires the object from the `SessionView` """
- try:
- session_id = self.request.GET['session_id']
- except KeyError:
- raise DocNotFound
+ session_id = self.request.kwargs.get('id')
self.stripe_session = stripe.checkout.Session.retrieve(session_id)
- return self.model.objects.get(stripe_id=self.stripe_session.client_reference_id)
-
- def get_context_data(self, **kwargs):
- context = super().get_context_data(**kwargs)
- context['stripe_session'] = self.stripe_session
- return context
+ # NOTE: Chose listing instead of indexing under the assumption of limited paid packages. Override if needed
+ pkg = [pkg for pkg in self.collection.all() if pkg.stripe_id ==
+ self.stripe_session.client_reference_id]
+ if pkg:
+ return pkg[0]
+ raise DocNotFound
-class BaseWebhookView(JSONView, ActionMixin):
+class BaseWebhookView(CreateView, ActionMixin):
""" Base class to use for executing Stripe webhook actions """
+ login_required = False
action = 'webhook'
http_method_names = ['post']
webhook_signing_secret = STRIPE_WEBHOOK_SECRET
+ def get_query(self, *args, **kwargs):
+ self.args = args
+ self.kwargs = kwargs
+ self.event = self.check_signing_secret()
+ try:
+ self.event_json = json.loads(self.request.body)
+ except (TypeError, JSONDecodeError):
+ self.event_json = self.request.body
+
+ try:
+ self.object = self.event.data.object
+ except AttributeError:
+ self.object = None
+
+ return self.event_action()
+
def event_action(self):
""" Transforms Stripe action to snake case for easier
calling in child class
@@ -106,25 +126,12 @@ def event_action(self):
that
"""
event_type = self.event.type.replace('.', '_')
- action = getattr(self, event_type, None)
- if isinstance(action, collections.Callable):
- action()
- return {'success': 'ok'}
- raise super().not_found_class()
-
- def post(self, request, *args, **kwargs):
- self.request = request
- self.args = args
- self.kwargs = kwargs
- self.event = self.check_signing_secret()
- self.event_json = json.loads(self.request.body)
-
- try:
- self.object = self.event.data.object
- except AttributeError:
- self.object = None
-
- return self.event_action()
+ if isinstance(event_type, str):
+ action = getattr(self, event_type, None)
+ if callable(action):
+ action()
+ return {'success': 'ok'}
+ raise NotImplementedError
def check_ip(self):
"""
@@ -137,7 +144,7 @@ def check_ip(self):
except (KeyError, JSONDecodeError):
return True
try:
- return self.request.META['REMOTE_ADDR'] in valid_ips
+ return self.request.source_ip in valid_ips
except KeyError:
return False
@@ -150,8 +157,7 @@ def send_html_email(self, subject, from_email: str, to_email_list: list, templat
DEFAULT_FROM_EMAIL (str): default `from` email
"""
if not context:
- context = {'object': self.object,
- 'request_body': self.request.body}
+ context = {'request_body': self.request.body}
if template_name:
rtemplate = Environment(
loader=BaseLoader()).from_string(template_name)
@@ -172,13 +178,15 @@ def send_html_email(self, subject, from_email: str, to_email_list: list, templat
def check_signing_secret(self):
"""
- Make sure the request's Stripe signature to make sure it matches our signing secret.
- :return: HttpResponse or Stripe Event Object
+ Make sure the request's Stripe signature to make sure it matches our signing secret
+ then returns the event
+
+ :return: Stripe Event Object
"""
# If we are running tests we can't verify the signature but we need the event objects
event = stripe.Webhook.construct_event(
- self.request.body, self.request.META['HTTP_STRIPE_SIGNATURE'], self.webhook_signing_secret
+ self.request.body, self.request.headers['Stripe-Signature'], self.webhook_signing_secret
)
return event
@@ -187,8 +195,9 @@ def get_transfer_data(self):
def checkout_session_completed(self):
""" A method to override to implement custom actions
- after successful Stripe checkout
+ after successful Stripe checkout.
+ This is a Stripe event.
Use this method by subclassing this class in your
custom claas
"""
diff --git a/pfunk/contrib/email/base.py b/pfunk/contrib/email/base.py
index 452da44..7f35655 100644
--- a/pfunk/contrib/email/base.py
+++ b/pfunk/contrib/email/base.py
@@ -3,14 +3,15 @@
from envs import env
from valley.utils import import_util
-from pfunk.contrib.templates import temp_env
+from pfunk.utils.templates import temp_env
class EmailBackend(object):
"""
Base email backend class
"""
- def get_template(self, template:str):
+
+ def get_template(self, template: str):
"""
Get the template based on the template location string
Args:
@@ -81,5 +82,5 @@ def send_email(subject: str, to_emails: list, html_template: str = None, txt_tem
with warnings.catch_warnings():
warnings.simplefilter('ignore', category=ResourceWarning)
email_backend().send_email(subject=subject, to_emails=to_emails, html_template=html_template,
- txt_template=txt_template, from_email=from_email, cc_emails=cc_emails,
- bcc_emails=bcc_emails, fail_silently=fail_silently, **kwargs)
\ No newline at end of file
+ txt_template=txt_template, from_email=from_email, cc_emails=cc_emails,
+ bcc_emails=bcc_emails, fail_silently=fail_silently, **kwargs)
diff --git a/pfunk/contrib/email/dummy.py b/pfunk/contrib/email/dummy.py
new file mode 100644
index 0000000..f542e90
--- /dev/null
+++ b/pfunk/contrib/email/dummy.py
@@ -0,0 +1,42 @@
+from pfunk.contrib.email.base import EmailBackend
+
+
+class DummyBackend(EmailBackend):
+ """
+ Dummy email backend for tests: collects the email kwargs into a dict instead of sending
+ """
+ region_name = None
+ charset = "UTF-8"
+
+ def send_email(self, subject: str, to_emails: list, html_template: str = None, txt_template: str = None,
+ from_email: str = None, cc_emails: list = [], bcc_emails: list = [], fail_silently: bool = True,
+ **kwargs):
+ """
+ Sends email
+ Args:
+ subject: Email subject line
+ to_emails: List of email addresses
+ html_template: HTML template location string
+ txt_template: Text template location string
+ from_email: From email address
+ cc_emails: CC email addresses
+ bcc_emails: BCC email addresses
+ fail_silently: Specifies whether to fail silently
+ **kwargs: keyword arguments used to render template(s)
+
+ Returns: None
+
+ """
+ email_dict = {
+ 'subject': subject,
+ 'to_emails': to_emails,
+ 'html_template': html_template,
+ 'txt_template': txt_template,
+ 'from_email': from_email,
+ 'cc_emails': cc_emails,
+ 'bcc_emails': bcc_emails,
+ 'fail_silently': fail_silently,
+ 'kwargs': kwargs,
+ 'body': self.get_body_kwargs(html_template=html_template, txt_template=txt_template, **kwargs)
+ }
+ return email_dict
\ No newline at end of file
diff --git a/pfunk/contrib/email/ses.py b/pfunk/contrib/email/ses.py
index 20077e5..fd181d2 100644
--- a/pfunk/contrib/email/ses.py
+++ b/pfunk/contrib/email/ses.py
@@ -1,5 +1,6 @@
import boto3
from envs import env
+
from pfunk.contrib.email.base import EmailBackend
@@ -47,5 +48,5 @@ def send_email(self, subject: str, to_emails: list, html_template: str = None, t
'Body': self.get_body_kwargs(html_template=html_template, txt_template=txt_template, **kwargs)
}
)
-
- return res
\ No newline at end of file
+
+ return res
diff --git a/pfunk/contrib/generic.py b/pfunk/contrib/generic.py
index a0b00b9..204b541 100644
--- a/pfunk/contrib/generic.py
+++ b/pfunk/contrib/generic.py
@@ -1,13 +1,12 @@
-from pfunk.resources import Function, Index
from pfunk.client import q
-
+from pfunk.resources import Function
class GenericFunction(Function):
action = 'create'
def get_role(self):
- return None # pragma: no cover
+ return None # pragma: no cover
def get_name(self):
return f"{self.action}_{self.collection.get_class_name()}"
@@ -24,19 +23,19 @@ def get_name(self):
def get_body(self):
return q.query(
q.lambda_(["input"],
- q.map_(
- q.lambda_(['ref'],
- q.get(q.var('ref'))
- ),
- q.paginate(
- q.match(q.index(self.collection.all_index_name())),
- q.select('size', q.var('input'))
- )
- )
- )
+ q.map_(
+ q.lambda_(['ref'],
+ q.get(q.var('ref'))
+ ),
+ q.paginate(
+ q.match(q.index(self.collection.all_index_name())),
+ q.select('size', q.var('input'))
+ )
+ )
+ )
)
-
-
+
+
class GenericCreate(GenericFunction):
def get_body(self):
@@ -68,13 +67,13 @@ def get_body(self):
))
-
class GenericDelete(GenericFunction):
action = 'delete'
def get_body(self):
return q.query(
q.lambda_(["input"],
- q.delete(q.ref(q.collection(self.collection.get_collection_name()), q.select('id', q.var("input"))))
+ q.delete(
+ q.ref(q.collection(self.collection.get_collection_name()), q.select('id', q.var("input"))))
)
- )
\ No newline at end of file
+ )
diff --git a/pfunk/contrib/templates.py b/pfunk/contrib/templates.py
deleted file mode 100644
index e86940a..0000000
--- a/pfunk/contrib/templates.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from envs import env
-from jinja2 import Environment
-from jinja2.loaders import FileSystemLoader
-
-
-temp_env = Environment(loader=FileSystemLoader(env('TEMPLATE_ROOT_DIR')))
\ No newline at end of file
diff --git a/pfunk/exceptions.py b/pfunk/exceptions.py
index fc128e1..b91fbbe 100644
--- a/pfunk/exceptions.py
+++ b/pfunk/exceptions.py
@@ -1,5 +1,3 @@
-
-
class LoginFailed(Exception):
"""Exception raised when an attempt to login fails."""
pass
@@ -22,4 +20,9 @@ class Unauthorized(Exception):
class GraphQLError(Exception):
"""Graphql SyntaxError"""
+ pass
+
+
+class NotUniqueError(Exception):
+ """Exception raised when a unique constraint is violated"""
pass
\ No newline at end of file
diff --git a/pfunk/fields.py b/pfunk/fields.py
index d0e06e3..fa5755a 100644
--- a/pfunk/fields.py
+++ b/pfunk/fields.py
@@ -3,13 +3,12 @@
import pytz
from valley.exceptions import ValidationException
from valley.properties import CharProperty, IntegerProperty, DateTimeProperty, DateProperty, FloatProperty, \
- BooleanProperty, EmailProperty, SlugProperty, BaseProperty, ForeignProperty, ForeignListProperty, ListProperty
+ BooleanProperty, EmailProperty, SlugProperty, BaseProperty, ForeignProperty as FP, ForeignListProperty, ListProperty
from valley.utils import import_util
+from valley.validators import ChoiceValidator, ForeignValidator
-from valley.validators import Validator, ChoiceValidator, ForeignValidator
-
-from pfunk.collection import Enum
from pfunk.client import Ref
+from pfunk.collection import Enum
class ChoiceListValidator(ChoiceValidator):
@@ -41,6 +40,10 @@ def get_graphql_type(self):
return f"{self.GRAPHQL_FIELD_TYPE}{req} {unique}"
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.StringField'
+
+
class StringField(GraphQLMixin, CharProperty):
pass
@@ -48,7 +51,8 @@ class StringField(GraphQLMixin, CharProperty):
class IntegerField(GraphQLMixin, IntegerProperty):
GRAPHQL_FIELD_TYPE = 'Int'
-
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.IntegerField'
class DateTimeField(GraphQLMixin, DateTimeProperty):
GRAPHQL_FIELD_TYPE = 'Time'
@@ -56,17 +60,28 @@ class DateTimeField(GraphQLMixin, DateTimeProperty):
def now(self):
return datetime.datetime.now(tz=pytz.UTC)
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.DateTimeField'
+
class FloatField(GraphQLMixin, FloatProperty):
GRAPHQL_FIELD_TYPE = 'Float'
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.FloatField'
+
+
class BooleanField(GraphQLMixin, BooleanProperty):
GRAPHQL_FIELD_TYPE = 'Boolean'
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.BooleanField'
+
class EmailField(GraphQLMixin, EmailProperty):
- pass
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.EmailField'
class SlugField(GraphQLMixin, SlugProperty):
@@ -102,6 +117,9 @@ def get_graphql_type(self):
unique = '@unique'
return f"{self.enum.name}{req} {unique}"
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.ChoiceField'
+
class ReferenceValidator(ForeignValidator):
@@ -113,8 +131,21 @@ def validate(self, value, key):
raise ValidationException('{0}: This value ({1}) should be an instance of {2}.'.format(
key, value, self.foreign_class.__name__))
+
+class ForeignProperty(FP):
+
+ def __init__(self, foreign_class, return_type=None, return_prop=None, choices_index=None, **kwargs):
+ super(ForeignProperty, self).__init__(
+ foreign_class, return_type=return_type, return_prop=return_prop,
+ choices_index=choices_index, **kwargs)
+ self.choices_index = choices_index
+
+
class ReferenceField(GraphQLMixin, ForeignProperty):
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.ChoiceField'
+
def get_validators(self):
super(BaseProperty, self).get_validators()
self.validators.insert(0, ReferenceValidator(self.foreign_class))
@@ -153,7 +184,7 @@ def validate(self, value, key):
self.foreign_class = import_util(self.foreign_class)
if value:
for obj in value:
- if not isinstance(obj,self.foreign_class):
+ if not isinstance(obj, self.foreign_class):
raise ValidationException(
'{0}: This value ({1}) should be an instance of {2}.'.format(
key, obj, self.foreign_class.__name__))
@@ -162,7 +193,7 @@ def validate(self, value, key):
class ManyToManyField(GraphQLMixin, ForeignListProperty):
relation_field = True
- def __init__(self, foreign_class, relation_name, return_type=None,return_prop=None,**kwargs):
+ def __init__(self, foreign_class, relation_name, return_type=None, return_prop=None, **kwargs):
self.foreign_class = foreign_class
self.relation_name = relation_name
super(ManyToManyField, self).__init__(foreign_class, return_type=return_type, return_prop=return_prop, **kwargs)
@@ -187,8 +218,13 @@ def get_python_value(self, value):
c.ref = i
c._lazied = True
ra(c)
- if isinstance(i, self.foreign_class):
- ra(i)
+
+ try:
+ if isinstance(i, self.foreign_class):
+ ra(i)
+ except TypeError:
+ if f'{i.__class__.__module__}.{i.__class__.__name__}' == self.foreign_class:
+ ra(i)
return ref_list
def get_db_value(self, value):
@@ -204,6 +240,9 @@ class DateField(GraphQLMixin, DateProperty):
def now(self):
return datetime.datetime.now(tz=pytz.UTC).date()
+ def get_form_field(self, **kwargs):
+ return 'formy.fields.DateField'
+
class ListField(GraphQLMixin, ListProperty):
GRAPHQL_FIELD_TYPE = '[String]'
diff --git a/pfunk/forms/__init__.py b/pfunk/forms/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pfunk/forms/collections.py b/pfunk/forms/collections.py
new file mode 100644
index 0000000..6c39c17
--- /dev/null
+++ b/pfunk/forms/collections.py
@@ -0,0 +1,74 @@
+from valley.utils import import_util
+
+from pfunk.forms.form import Form
+
+
+class CollectionForm(Form):
+ _template = 'forms/ul.html'
+
+ def __init__(self, **kwargs):
+ try:
+ self._instance = kwargs.pop('_instance')
+ except KeyError:
+ self._instance = None
+ if self._instance:
+ self._data = self._instance.to_dict(flat=True).get('data')
+ self.create_fields()
+ super(CollectionForm, self).__init__(**kwargs)
+
+ @classmethod
+ def add_field_choices(cls, class_name, field):
+ if class_name == 'EnumField':
+ choices = {item: item for item in field.choices}
+ else:
+ choices = {str(obj): obj.ref.id() for obj in cls.get_queryset(
+ field.get_foreign_class(), field.choices_index)}
+ return choices
+
+ @classmethod
+ def get_queryset(cls, collection, index=None):
+ if not index:
+ return collection.all()
+ return collection.get_index(index)
+
+ def add_field(self, name, field):
+ # We need to know the class name to determine the correct form field
+ class_name = field.__class__.__name__
+ # We use the class name to get the correct form field class from the map
+ field_class = import_util(field.get_form_field())
+
+ if field_class:
+ field_kwargs = {
+ 'required': field.required,
+ }
+ if field.choices:
+ field_kwargs['choices'] = field.choices
+ if class_name in ['ReferenceField', 'ManyToManyField', 'EnumField']:
+ field_kwargs['choices'] = self.add_field_choices(
+ class_name, field)
+ if field.default_value:
+ field_kwargs['default_value'] = field.default_value
+ try:
+ if self._data.get(name):
+ field_kwargs['value'] = self._data.get(name)
+ except AttributeError:
+ pass
+ self._base_properties[name] = field_class(**field_kwargs)
+
+ def create_fields(self):
+ if hasattr(self.Meta, 'fields') and len(self.Meta.fields) > 0:
+ for name in self.Meta.fields:
+ self.add_field(name, self.Meta.collection._base_properties[name])
+ else:
+ try:
+ for name, field in self.Meta.collection._base_properties.items():
+ self.add_field(name, field)
+ except TypeError:
+ pass
+
+ def save(self):
+ return self.Meta.collection(**self._data).save()
+
+ class Meta:
+ collection = None
+ fields = None
diff --git a/pfunk/forms/fields.py b/pfunk/forms/fields.py
new file mode 100644
index 0000000..ab4b6f6
--- /dev/null
+++ b/pfunk/forms/fields.py
@@ -0,0 +1,166 @@
+from valley.mixins import *
+from valley.properties import BaseProperty as VBaseProperty
+
+from pfunk.utils.templates import temp_env
+
+
+class BaseField(VBaseProperty):
+ template = 'formy/fields/base.html'
+ static_assets = tuple()
+ css_classes = ''
+ value = None
+ name = None
+ input_type = 'text'
+
+ def __init__(
+ self,
+ default_value=None,
+ required=False,
+ validators=[],
+ verbose_name=None,
+ css_classes=None,
+ placeholder=None,
+ help_text=None,
+ static_assets=None,
+ template=None,
+ **kwargs
+ ):
+ super(BaseField, self).__init__(default_value=default_value,
+ required=required,
+ validators=validators,
+ verbose_name=verbose_name,
+ **kwargs)
+ self.default_value = default_value
+ self.required = required
+ self.kwargs = kwargs
+ self.template = template or self.template
+ self.static_assets = static_assets or self.static_assets
+ self.css_classes = css_classes or self.css_classes
+ self.verbose_name = verbose_name
+ self.placeholder = placeholder or self.verbose_name
+ self.help_text = help_text
+ self.validators = list()
+ self.get_validators()
+ self.validators = set(self.validators)
+
+ def get_verbose_name(self):
+ return self.verbose_name or self.name.replace('_', ' ').title()
+
+ def render(self, name=None, value=None, css_classes=None, input_type=None,
+ placeholder=None, choices=None, errors=dict()):
+ name = name or self.name
+ verbose_name = self.verbose_name or name.replace('_', ' ').title()
+ value = value or self.value
+ choices = choices or self.choices
+ input_type = input_type or self.input_type
+ placeholder = placeholder or self.placeholder or verbose_name
+ error = errors.get(name)
+ if css_classes and self.css_classes:
+ css_classes = '{},{}'.format(self.css_classes, css_classes)
+ elif not css_classes:
+ css_classes = self.css_classes
+
+ return temp_env.get_template(self.template).render(
+ name=name,
+ error=error,
+ choices=choices,
+ value=value,
+ verbose_name=verbose_name,
+ placeholder=placeholder,
+ css_classes=css_classes,
+ input_type=input_type,
+ )
+
+
+class StringField(CharVariableMixin, BaseField):
+ pass
+
+
+class SlugField(SlugVariableMixin, BaseField):
+ pass
+
+
+class EmailField(EmailVariableMixin, BaseField):
+ input_type = 'email'
+
+
+class IntegerField(IntegerVariableMixin, BaseField):
+ input_type = 'number'
+
+
+class PasswordField(StringField):
+ input_type = 'password'
+
+
+class FloatField(FloatVariableMixin, BaseField):
+ input_type = 'number'
+
+
+class BooleanField(BooleanMixin, BaseField):
+ input_type = 'checkbox'
+
+
+class DateField(DateMixin, BaseField):
+ input_type = 'date'
+
+ def __init__(
+ self,
+ default_value=None,
+ required=True,
+ validators=[],
+ verbose_name=None,
+ auto_now=False,
+ auto_now_add=False,
+ **kwargs):
+ super(
+ DateField,
+ self).__init__(
+ default_value=default_value,
+ required=required,
+ validators=validators,
+ verbose_name=verbose_name,
+ **kwargs)
+ self.auto_now = auto_now
+ self.auto_now_add = auto_now_add
+
+
+class DateTimeField(DateTimeMixin, BaseField):
+ input_type = 'datetime-local'
+
+ def __init__(
+ self,
+ default_value=None,
+ required=True,
+ validators=[],
+ verbose_name=None,
+ auto_now=False,
+ auto_now_add=False,
+ **kwargs):
+ super(
+ DateTimeField,
+ self).__init__(
+ default_value=default_value,
+ required=required,
+ validators=validators,
+ verbose_name=verbose_name,
+ **kwargs)
+ self.auto_now = auto_now
+ self.auto_now_add = auto_now_add
+
+
+class ChoiceField(BaseField):
+ template = 'formy/fields/select.html'
+
+
+class MultipleChoiceField(BaseField):
+ template = 'formy/fields/select-multiple.html'
+
+
+class TextAreaField(BaseField):
+ template = 'formy/fields/textarea.html'
+
+
+class CKEditor(BaseField):
+ template = 'formy/fields/ckeditor.html'
+ static_assets = (
+ '')
\ No newline at end of file
diff --git a/pfunk/forms/form.py b/pfunk/forms/form.py
new file mode 100644
index 0000000..3bb1b61
--- /dev/null
+++ b/pfunk/forms/form.py
@@ -0,0 +1,43 @@
+from valley.declarative import DeclaredVars as DV, \
+ DeclarativeVariablesMetaclass as DVM
+from valley.schema import BaseSchema
+
+from pfunk.forms.fields import BaseField
+from pfunk.utils.templates import temp_env
+
+
+class DeclaredVars(DV):
+ base_field_class = BaseField
+
+
+class DeclarativeVariablesMetaclass(DVM):
+ declared_vars_class = DeclaredVars
+
+
+class BaseForm(BaseSchema):
+ """
+ Base class for all Formy form classes.
+ """
+ _template = 'formy/form/ul.html'
+ BUILTIN_DOC_ATTRS = []
+ _create_error_dict = True
+
+ def __iter__(self):
+ for k, field in self._base_properties.items():
+ field.name = k
+ field.value = self._data.get(k)
+ yield field
+
+ def render(self, include_submit=True):
+ return temp_env.get_template(self._template).render(
+ form=self, include_submit=include_submit)
+
+ def render_static_assets(self):
+ static_assets = []
+ for field in self:
+ static_assets.extend(field.static_assets)
+ return ''.join(set(static_assets))
+
+
+class Form(BaseForm, metaclass=DeclarativeVariablesMetaclass):
+ pass
\ No newline at end of file
diff --git a/pfunk/forms/templates/forms/ul.html b/pfunk/forms/templates/forms/ul.html
new file mode 100644
index 0000000..0549589
--- /dev/null
+++ b/pfunk/forms/templates/forms/ul.html
@@ -0,0 +1,15 @@
+
\ No newline at end of file
diff --git a/pfunk/project.py b/pfunk/project.py
index e3c3a85..dd44c8d 100644
--- a/pfunk/project.py
+++ b/pfunk/project.py
@@ -1,14 +1,12 @@
import logging
-
-import requests
from io import BytesIO
+import requests
from envs import env
from faunadb.client import FaunaClient
from jinja2 import Template
from valley.contrib import Schema
-
from valley.properties import CharProperty, ForeignProperty
from valley.utils import import_util
from werkzeug import Request as WerkzeugRequest
@@ -19,11 +17,13 @@
from pfunk.web.request import HTTPRequest, RESTRequest, WSGIRequest
from pfunk.web.response import HttpNotFoundResponse, JSONMethodNotAllowedResponse
+from .contrib.auth.collections import User, Group, UserGroups, BaseGroup, BaseUser, ExtendedUser, BaseUserGroup
from .collection import Collection
from .fields import ForeignList
from .template import graphql_template
from .utils.publishing import BearerAuth
from .web.views.graphql import GraphQLView
+from .utils.swagger import SwaggerDoc
logger = logging.getLogger('pfunk')
@@ -42,6 +42,35 @@
]
}
+GRAPHQL_TO_YAML_TYPES = {
+ "String": "string",
+ "Int": "integer",
+ "Float": "number",
+ "Boolean": "boolean"
+}
+
+PFUNK_TO_YAML_TYPES = {
+ "StringField": "string",
+ "SlugField": "string",
+ "EmailField": "string",
+ "EnumField": "string",
+ "ManyToManyField": "#/definitions/",
+ "ReferenceField": "#/definitions/",
+ "ForeignList": "#/definitions/",
+ "IntegerField": "integer",
+ "FloatField": "number",
+ "BooleanField": "boolean",
+ "ListField": "array"
+}
+
+WERKZEUG_URL_TO_YAML_TYPES = {
+ "int": "integer",
+ "string": "string",
+ "float": "number",
+ "path": "string",
+ "uuid": "string"
+}
+
class Project(Schema):
"""
@@ -176,14 +205,31 @@ def publish(self, mode: str = 'merge') -> int:
auth=BearerAuth(secret),
data=gql_io
)
+
if resp.status_code == 200:
test_mode = env('PFUNK_TEST_MODE', False, var_type='boolean')
if not test_mode:
print('GraphQL Schema Imported Successfully!!') # pragma: no cover
- for col in set(self.collections):
- col.publish()
- if resp.status_code != 200:
+ else:
+ print('Error Publishing GraphQL!!')
+ print('----------------------------------------')
print(resp.content)
+ return
+
+ collections = set(self.collections)
+ # make publishing prioritize User, Group and UserGroups
+ for col in collections.copy():
+ if (issubclass(col, User)
+ or issubclass(col, Group)
+ or issubclass(col, BaseGroup)
+ or issubclass(col, ExtendedUser)
+ or issubclass(col, BaseUser)
+ or issubclass(col, UserGroups)
+ or issubclass(col, BaseUserGroup)):
+ col.publish()
+ collections.remove(col)
+ for col in collections:
+ col.publish()
return resp.status_code
def unpublish(self) -> None:
@@ -213,6 +259,7 @@ def event_handler(self, event: dict, context: object) -> object:
path = event.get('path')
method = event.get('httpMethod')
request_cls = RESTRequest
+ event.reverse = self.urls.build
try:
view, kwargs = self.urls.match(path, method)
except NotFound:
@@ -266,3 +313,22 @@ def wsgi_app(self, environ, start_response):
start_response(status_str, response.wsgi_headers)
return [str.encode(response.body)]
+
+ def generate_swagger(self, yaml_dir='', config_file='pfunk.json'):
+ """ Generates a swagger file that houses all endpoints
+
+ Args:
+ yaml_dir (str, optional):
+ which directory to create the swagger yaml file
+ config_file (str, optional):
+ which directory to look for the config file
+
+ Returns:
+ swagger file
+ """
+ swag = SwaggerDoc(
+ collections=self.collections,
+ rules=[GraphQLView.url()],
+ config_file=config_file)
+ swag_file = swag.generate_swagger(dir=yaml_dir)
+ return swag_file
diff --git a/pfunk/queryset.py b/pfunk/queryset.py
index e9195cc..5c49e96 100644
--- a/pfunk/queryset.py
+++ b/pfunk/queryset.py
@@ -25,4 +25,4 @@ def __len__(self):
return len(self.data)
def __getitem__(self, x):
- return self.data[x]
\ No newline at end of file
+ return self.data[x]
diff --git a/pfunk/resources.py b/pfunk/resources.py
index a4e9058..74786e2 100644
--- a/pfunk/resources.py
+++ b/pfunk/resources.py
@@ -2,8 +2,8 @@
from faunadb.query import query
-from pfunk.utils.publishing import create_or_update_function, create_or_update_role, create_or_pass_index
from pfunk.client import q
+from pfunk.utils.publishing import create_or_update_function, create_or_update_role, create_or_pass_index
class Resource(object):
@@ -52,20 +52,20 @@ def get_payload(self) -> dict:
return payload_dict
def publish(self):
- raise NotImplementedError # pragma: no cover
+ raise NotImplementedError # pragma: no cover
def unpublish(self):
- raise NotImplementedError # pragma: no cover
+ raise NotImplementedError # pragma: no cover
def get_body(self):
- raise NotImplementedError # pragma: no cover
+ raise NotImplementedError # pragma: no cover
class Function(Resource):
def get_role(self):
"""Gets the role to use when calling the function."""
- return None # pragma: no cover
+ return None # pragma: no cover
def publish(self):
"""
@@ -88,7 +88,7 @@ class Role(Resource):
user_table: str = None
def get_lambda(self, resource_type):
- return # pragma: no cover
+ return # pragma: no cover
def get_payload(self) -> dict:
"""
@@ -98,12 +98,14 @@ def get_payload(self) -> dict:
"""
payload_dict = {
"name": self.get_name(),
- "membership": self.get_membership(),
"privileges": self.get_privileges(),
}
data = self.get_data()
+ membership = self.get_membership()
if data:
payload_dict['data'] = data
+ if membership:
+ payload_dict['membership'] = membership
return payload_dict
def get_data(self) -> dict:
@@ -112,10 +114,10 @@ def get_data(self) -> dict:
Returns: dict
"""
- return None # pragma: no cover
+ return None # pragma: no cover
def get_privileges(self):
- raise NotImplementedError # pragma: no cover
+ raise NotImplementedError # pragma: no cover
def get_membership_lambda(self):
"""
@@ -125,11 +127,16 @@ def get_membership_lambda(self):
"""
return q.query(
q.lambda_(['object_ref'],
- q.equals(
- q.select('account_status', q.select('data', q.get(q.var('object_ref')))),
- "ACTIVE"
- )
- ))
+ q.equals(
+ q.select('account_status', q.select(
+ 'data', q.get(q.var('object_ref')))),
+ "ACTIVE"
+ )
+ ))
+
+ def get_user_table(self):
+ """ Acquires user table from the class name """
+ return None
def get_membership(self) -> dict:
"""
@@ -137,10 +144,13 @@ def get_membership(self) -> dict:
Returns: dict
"""
- return {
- 'resource': q.collection(self.user_table or self.collection.get_collection_name()),
- 'predicate': self.get_membership_lambda()
+ membership = self.get_membership_lambda()
+ payload_dict = {
+ 'resource': q.collection(self.get_user_table() or self.collection.get_collection_name()),
}
+ if membership:
+ payload_dict['predicate'] = self.get_membership_lambda()
+ return payload_dict
def publish(self):
"""
@@ -166,7 +176,8 @@ class Index(object):
serialized: bool = None
terms: list = None
values: list = None
- _accept_kwargs: list = ['name', 'source', 'unique', 'serialized', 'terms', 'values']
+ _accept_kwargs: list = ['name', 'source',
+ 'unique', 'serialized', 'terms', 'values']
def __init__(self, **kwargs):
"""
@@ -189,7 +200,6 @@ def get_kwargs(self) -> dict:
kwargs = {'name': self.name, 'source': q.collection(self.source), }
if self.terms:
-
kwargs['terms'] = self.terms
if self.values:
kwargs['values'] = self.values
@@ -240,10 +250,10 @@ def get_body(self):
q.get(q.var('ref'))
),
q.paginate(
- q.match(q.index(self.collection.all_index_name())),
+ q.match(
+ q.index(self.collection.all_index_name())),
q.select('size', q.var('input'))
)
- )
- )
+ )
+ )
)
-
diff --git a/pfunk/template.py b/pfunk/template.py
index a61f68f..090ea9c 100644
--- a/pfunk/template.py
+++ b/pfunk/template.py
@@ -56,4 +56,4 @@
key_template = Template("""
KEYS = {{keys}}
-""")
\ No newline at end of file
+""")
diff --git a/pfunk/test_resources/__init__.py b/pfunk/test_resources/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pfunk/test_resources/templates/house/list.html b/pfunk/test_resources/templates/house/list.html
new file mode 100644
index 0000000..dbd6f25
--- /dev/null
+++ b/pfunk/test_resources/templates/house/list.html
@@ -0,0 +1,14 @@
+
+
+
+
+ Title
+
+
+
+ {% for house in object_list %}
+ - {{ house.address }}
+ {% endfor %}
+
+
+
\ No newline at end of file
diff --git a/pfunk/test_resources/templates/house/update.html b/pfunk/test_resources/templates/house/update.html
new file mode 100644
index 0000000..dbd6f25
--- /dev/null
+++ b/pfunk/test_resources/templates/house/update.html
@@ -0,0 +1,14 @@
+
+
+
+
+ Title
+
+
+
+ {% for house in object_list %}
+ - {{ house.address }}
+ {% endfor %}
+
+
+
\ No newline at end of file
diff --git a/pfunk/testcase.py b/pfunk/testcase.py
index eb6d022..d438807 100644
--- a/pfunk/testcase.py
+++ b/pfunk/testcase.py
@@ -1,10 +1,10 @@
+import os
import unittest
-
import uuid
-import os
from valley.utils import import_util
from werkzeug.test import Client
+
from pfunk import Project
from pfunk.client import FaunaClient, q
from pfunk.template import key_template
@@ -14,7 +14,7 @@ class PFunkTestCase(unittest.TestCase):
def setUp(self) -> None:
os.environ['PFUNK_TEST_MODE'] = 'True'
- os.environ['TEMPLATE_ROOT_DIR'] = '/tmp'
+ os.environ['TEMPLATE_ROOT_DIR'] = '/'
self.client = FaunaClient(secret='secret')
self.db_name = str(uuid.uuid4())
self.client.query(
@@ -41,8 +41,14 @@ class CollectionTestCase(PFunkTestCase):
def setUp(self) -> None:
super(CollectionTestCase, self).setUp()
self.project = Project()
+ coll = []
+ for i in self.collections:
+ if isinstance(i, str):
+ coll.append(import_util(i))
+ else:
+ coll.append(i)
- self.project.add_resources(self.collections)
+ self.project.add_resources(coll)
self.project.publish()
@@ -53,12 +59,16 @@ def setUp(self) -> None:
self.app = self.project.wsgi_app
self.c = Client(self.app)
os.environ.setdefault('KEY_MODULE', 'pfunk.tests.unittest_keys.KEYS')
- Key = import_util('pfunk.contrib.auth.collections.Key')
+ Key = import_util('pfunk.contrib.auth.key.Key')
keys = Key.create_keys()
self.keys_path = 'pfunk/tests/unittest_keys.py'
- with open(self.keys_path, 'w+') as f:
- f.write(key_template.render(keys=keys))
-
+ try:
+ with open(self.keys_path, 'w+') as f:
+ f.write(key_template.render(keys=keys))
+ except (Exception, FileNotFoundError) as e:
+ print(e)
+ # Print the current working directory
+ print('unittest_keys.py not found in current working directory', os.getcwd())
def tearDown(self) -> None:
super(APITestCase, self).tearDown()
diff --git a/pfunk/tests/__init__.py b/pfunk/tests/__init__.py
index 74257c0..ce57492 100644
--- a/pfunk/tests/__init__.py
+++ b/pfunk/tests/__init__.py
@@ -1,7 +1,22 @@
from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField
-from pfunk.resources import Index
-from pfunk.contrib.auth.collections import User, Group
from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole
+from pfunk.resources import Index
+from pfunk.web.views.html import HTMLView
+
+from jinja2 import Environment
+from jinja2.loaders import ChoiceLoader, PackageLoader, FileSystemLoader
+
+
+temp_env = Environment(loader=FileSystemLoader('./pfunk/tests/templates'))
+# Let's monkey-patch `HTMLView`'s method `get_template` for testing
+def get_template(self):
+ return temp_env.get_template(
+ self.template_name.format(
+ collection=self.collection.get_collection_name().lower(),
+ action=self.action
+ )
+ )
+HTMLView.get_template = get_template
GENDER_PRONOUN = Enum(name='gender_pronouns', choices=['he', 'her', 'they'])
@@ -15,6 +30,7 @@ class SimpleIndex(Index):
class Sport(Collection):
use_crud_functions = True
+ use_crud_html_views = True
name = StringField(required=True)
slug = SlugField()
@@ -27,12 +43,13 @@ class Meta:
class Person(Collection):
collection_roles = [GenericGroupBasedRole]
+ use_crud_html_views = True
verbose_plural_name = 'people'
first_name = StringField(required=True)
last_name = StringField(required=True)
gender_pronoun = EnumField(GENDER_PRONOUN)
sport = ReferenceField(Sport)
- group = ReferenceField(Group)
+ group = ReferenceField('pfunk.contrib.auth.collections.Group')
def __unicode__(self):
return f"{self.first_name} {self.last_name}"
@@ -40,8 +57,9 @@ def __unicode__(self):
class House(Collection):
collection_roles = [GenericUserBasedRole]
+ use_crud_html_views = True
address = StringField(required=True)
- user = ReferenceField(User)
+ user = ReferenceField('pfunk.contrib.auth.collections.User')
def __unicode__(self):
- return self.address
\ No newline at end of file
+ return self.address
diff --git a/pfunk/tests/templates/house/create.html b/pfunk/tests/templates/house/create.html
new file mode 100644
index 0000000..e930a89
--- /dev/null
+++ b/pfunk/tests/templates/house/create.html
@@ -0,0 +1,12 @@
+
+
+
+
+ Title
+
+
+
+
+
\ No newline at end of file
diff --git a/pfunk/tests/templates/house/delete.html b/pfunk/tests/templates/house/delete.html
new file mode 100644
index 0000000..fa76b2c
--- /dev/null
+++ b/pfunk/tests/templates/house/delete.html
@@ -0,0 +1,15 @@
+
+
+
+
+ Delete {{object.address}}
+
+
+Delete {{object.address}}
+
+
+
\ No newline at end of file
diff --git a/pfunk/tests/templates/house/detail.html b/pfunk/tests/templates/house/detail.html
new file mode 100644
index 0000000..07d58a8
--- /dev/null
+++ b/pfunk/tests/templates/house/detail.html
@@ -0,0 +1,13 @@
+
+
+
+
+ {{object.address}}
+
+
+{{object.address}}
+
+{{object.user}}
+
+
+
\ No newline at end of file
diff --git a/pfunk/tests/templates/house/list.html b/pfunk/tests/templates/house/list.html
new file mode 100644
index 0000000..dbd6f25
--- /dev/null
+++ b/pfunk/tests/templates/house/list.html
@@ -0,0 +1,14 @@
+
+
+
+
+ Title
+
+
+
+ {% for house in object_list %}
+ - {{ house.address }}
+ {% endfor %}
+
+
+
\ No newline at end of file
diff --git a/pfunk/tests/templates/house/update.html b/pfunk/tests/templates/house/update.html
new file mode 100644
index 0000000..3c4af60
--- /dev/null
+++ b/pfunk/tests/templates/house/update.html
@@ -0,0 +1,3 @@
+{% block test_block %}
+{{unittest_value}}
+{% endblock %}
\ No newline at end of file
diff --git a/pfunk/tests/test_auth.py b/pfunk/tests/test_auth.py
index 33ec70a..91b2e47 100644
--- a/pfunk/tests/test_auth.py
+++ b/pfunk/tests/test_auth.py
@@ -1,13 +1,15 @@
from faunadb.errors import PermissionDenied
-from pfunk.contrib.auth.collections import PermissionGroup
-from pfunk.tests import User, Group, Sport, Person, House
+from pfunk.contrib.auth.key import PermissionGroup
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.exceptions import LoginFailed
from pfunk.testcase import CollectionTestCase
+from pfunk.tests import Sport, Person, House
class AuthTestCase(CollectionTestCase):
- collections = [User, Group, Sport, Person, House]
+ collections = [User, Group, UserGroups,
+ Sport, Person, House]
def setUp(self) -> None:
super(AuthTestCase, self).setUp()
diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py
new file mode 100644
index 0000000..b181b78
--- /dev/null
+++ b/pfunk/tests/test_aws.py
@@ -0,0 +1,91 @@
+import os
+import json
+import unittest
+import tempfile
+from unittest import mock
+
+from pfunk.utils.aws import ApiGateway
+from pfunk.tests import Person, Sport
+from pfunk.project import Project
+from pfunk.contrib.auth.collections import Group, User, UserGroups
+
+
+class ApiGatewayTests(unittest.TestCase):
+ """ Unit tests for creation of API from Swagger file
+
+ Note that the unittests uses mocked boto3 normally. If
+ you want to test against a real endpoint, remove the
+ patch decorator at `setUpClass` and the `mocked`
+ param. Also make sure you have the required
+ env vars for AWS credentials and you have
+ the json config in the current env.
+ """
+
+ @classmethod
+ @mock.patch('boto3.client')
+ def setUpClass(cls, mocked) -> None:
+ cls.project = Project()
+ cls.aws_client = ApiGateway()
+ cls.project.add_resources([Person, Sport, Group, User])
+
+ with open(f'pfunk.json', 'x') as f:
+ json.dump({
+ 'name': 'test',
+ 'api_type': 'rest',
+ 'description': 'test project',
+ 'host': 'localhost',
+ 'stages': {'dev': {
+ 'key_module': f'test.dev_keys.KEYS',
+ 'fauna_secret': 'test-key',
+ 'bucket': 'test-bucket',
+ 'default_from_email': 'test@example.org'
+ }}
+ }, f, indent=4, sort_keys=True)
+ swagger = cls.project.generate_swagger()
+ cls.swagger_dir = swagger['dir']
+ cls.swagger_file = swagger['swagger_file']
+
+ @classmethod
+ def tearDownClass(cls) -> None:
+ os.remove("pfunk.json")
+ try:
+ os.remove('swagger.yaml')
+ except FileNotFoundError:
+ pass
+
+ def test_validate_yaml(self):
+ result = self.aws_client.validate_yaml(self.swagger_dir)
+ self.assertIsNone(result) # if there are no errors, then spec is valid
+
+ def test_validate_wrong_yaml(self):
+ result = self.aws_client.validate_yaml('wrong yaml...33::39')
+ # if there are returned objs, there is an error
+ self.assertIsNotNone(result)
+
+ @mock.patch('boto3.client')
+ def test_create_api_from_yaml(self, mocked):
+ result = self.aws_client.create_api_from_yaml(
+ yaml_file=self.swagger_dir)
+ self.assertTrue(result['success'])
+
+ @mock.patch('boto3.client')
+ def test_create_api_from_wrong_yaml(self, mocked):
+ with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp:
+ tmp.seek(0)
+ tmp.write('test wrong yaml')
+ result = self.aws_client.create_api_from_yaml(tmp.name)
+ self.assertEqual(result['error'], 'Bad Request. YAML is not valid.')
+
+ @mock.patch('boto3.client')
+ def test_update_api_from_yaml(self, mocked):
+ result = self.aws_client.update_api_from_yaml(
+ yaml_file=self.swagger_dir, mode='merge')
+ self.assertTrue(result['success'])
+
+ @mock.patch('boto3.client')
+ def test_update_api_from_wrong_yaml(self, mocked):
+ with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp:
+ tmp.seek(0)
+ tmp.write('test wrong yaml')
+ result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge')
+ self.assertEqual(result['error'], 'Bad Request. YAML is not valid.')
diff --git a/pfunk/tests/test_collection.py b/pfunk/tests/test_collection.py
index 717b164..2692497 100644
--- a/pfunk/tests/test_collection.py
+++ b/pfunk/tests/test_collection.py
@@ -50,7 +50,3 @@ def test_get_unique_together(self):
sport = Sport()
sport.get_unique_together()
self.assertEqual(len(sport.collection_indexes), 1)
-
-
-
-
diff --git a/pfunk/tests/test_crud.py b/pfunk/tests/test_crud.py
index c09c17e..a960fc5 100644
--- a/pfunk/tests/test_crud.py
+++ b/pfunk/tests/test_crud.py
@@ -1,6 +1,5 @@
-from faunadb.errors import PermissionDenied
-
-from pfunk.tests import User, Group
+from pfunk.contrib.auth.collections import Group
+from pfunk.contrib.auth.collections import User
from pfunk.testcase import CollectionTestCase
@@ -12,8 +11,8 @@ def setUp(self) -> None:
self.managers = Group.create(name='Managers', slug='managers')
self.power_users = Group.create(name='Power Users', slug='power-users')
self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
- last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.managers])
-
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.managers])
def test_create_user(self):
self.assertEqual(2, len(Group.all()))
@@ -33,7 +32,3 @@ def test_update(self):
self.user.save()
u = User.get(self.user.ref.id())
self.assertEqual(u.username, 'test-c')
-
-
-
-
diff --git a/pfunk/tests/test_deployment.py b/pfunk/tests/test_deployment.py
index 587f9f0..a85ae03 100644
--- a/pfunk/tests/test_deployment.py
+++ b/pfunk/tests/test_deployment.py
@@ -1,8 +1,8 @@
-from pfunk.contrib.auth.collections import Group, User
-from pfunk.testcase import PFunkTestCase
-from pfunk.project import Project
from pfunk.client import q
-from pfunk.tests import Sport, Person, User, Group
+from pfunk.contrib.auth.collections import Group, User, UserGroups
+from pfunk.project import Project
+from pfunk.testcase import PFunkTestCase
+from pfunk.tests import Sport, Person
class DeploymentTestCase(PFunkTestCase):
@@ -10,7 +10,7 @@ class DeploymentTestCase(PFunkTestCase):
def setUp(self) -> None:
super(DeploymentTestCase, self).setUp()
self.project = Project()
- self.project.add_resources([User, Group, Sport, Person])
+ self.project.add_resources([User, Group, Sport, Person, UserGroups])
def test_project_publish(self):
# Make sure collections are created
@@ -23,7 +23,7 @@ def test_project_publish(self):
q.paginate(q.collections(q.database(self.db_name)))
).get('data')
- self.assertEqual(5, len(collections_after))
+ self.assertEqual(6, len(collections_after))
# Make sure functions are created
functions = self.client.query(
q.paginate(q.functions(q.database(self.db_name)))
@@ -35,13 +35,10 @@ def test_project_publish(self):
indexes = self.client.query(
q.paginate(q.indexes(q.database(self.db_name)))
).get('data')
- self.assertEqual(13, len(indexes))
+ self.assertEqual(15, len(indexes))
# Add User and Group to the project
self.project.add_resources([User, Group])
# Publish twice more to make sure there are no errors with create_or_update_role or create_or_update_function
# functions
self.project.publish()
self.project.publish()
-
-
-
diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py
index 491a3a2..b885441 100644
--- a/pfunk/tests/test_email.py
+++ b/pfunk/tests/test_email.py
@@ -1,19 +1,21 @@
+import os
import tempfile
-from werkzeug.test import Client
from unittest import mock
-import os
+
from jinja2.exceptions import TemplateNotFound
+from werkzeug.test import Client
-from pfunk.tests import User, Group
-from pfunk.testcase import APITestCase
-from pfunk.contrib.email.ses import SESBackend
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.contrib.email.base import EmailBackend
+from pfunk.contrib.email.ses import SESBackend
+from pfunk.testcase import APITestCase
class TestEmailBackend(APITestCase):
- collections = [User, Group]
+ collections = [User, Group, UserGroups]
def setUp(self) -> None:
+ # NOTE: env var TEMPLATE_ROOT_DIR should be set to "/"
super(TestEmailBackend, self).setUp()
self.group = Group.create(name='Power Users', slug='power-users')
self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
@@ -22,8 +24,8 @@ def setUp(self) -> None:
self.backend = EmailBackend()
def test_get_template(self):
-
- template = self.backend.get_template('email/email_template.html')
+ template = self.backend.get_template(
+ '/code/pfunk/tests/templates/email/email_template.html')
# test jinja render if no exceptions
template.render(unittest_value="random value")
self.assertTrue(True) # if there are no exceptions, then it is a pass
@@ -44,7 +46,7 @@ def test_get_body(self):
class TestEmailSES(APITestCase):
- collections = [User, Group]
+ collections = [User, Group, UserGroups]
def setUp(self) -> None:
super(TestEmailSES, self).setUp()
@@ -58,11 +60,10 @@ def setUp(self) -> None:
@mock.patch('boto3.client')
def test_send_email(self, mocked):
-
res = self.SES.send_email(
subject="test",
to_emails=["testemail@email.com"],
- html_template='email/email_template.html',
+ html_template='code/pfunk/tests/templates/email/email_template.html',
from_email="testFromEmail@email.com",
cc_emails=["testCCemail@email.com"],
bcc_emails=["testBCCemail@email.com"],
diff --git a/pfunk/tests/test_jwt.py b/pfunk/tests/test_jwt.py
index a9ff9b3..7d57c7f 100644
--- a/pfunk/tests/test_jwt.py
+++ b/pfunk/tests/test_jwt.py
@@ -1,14 +1,11 @@
-from faunadb.errors import PermissionDenied
-
-from pfunk.contrib.auth.collections import Key
-from pfunk.tests import User, Group
+from pfunk.contrib.auth.key import Key
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.exceptions import LoginFailed
from pfunk.testcase import APITestCase
-from pfunk.contrib.auth.collections import Key
class AuthToken(APITestCase):
- collections = [User, Group]
+ collections = [User, Group, UserGroups]
def setUp(self) -> None:
super(AuthToken, self).setUp()
diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py
index db859a9..2665808 100644
--- a/pfunk/tests/test_project.py
+++ b/pfunk/tests/test_project.py
@@ -1,12 +1,38 @@
+import json
+import os
import unittest
+
+from pfunk.cli import init
from pfunk.project import Project
from pfunk.tests import Person, Sport, GENDER_PRONOUN
+from pfunk.contrib.auth.collections import User
+from pfunk.contrib.auth.collections import Group
class ProjectTestCase(unittest.TestCase):
def setUp(self) -> None:
self.project = Project()
+ with open(f'pfunk.json', 'x') as f:
+ json.dump({
+ 'name': 'test',
+ 'api_type': 'rest',
+ 'description': 'test project',
+ 'host': 'localhost',
+ 'stages': {'dev': {
+ 'key_module': f'test.dev_keys.KEYS',
+ 'fauna_secret': 'test-key',
+ 'bucket': 'test-bucket',
+ 'default_from_email': 'test@example.org'
+ }}
+ }, f, indent=4, sort_keys=True)
+
+ def tearDown(self) -> None:
+ os.remove("pfunk.json")
+ try:
+ os.remove('swagger.yaml')
+ except FileNotFoundError:
+ pass
def test_add_resource(self):
self.project.add_resource(Person)
@@ -27,4 +53,7 @@ def test_render(self):
self.assertTrue('type Sport' in gql)
self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql)
-
+ def test_swagger(self):
+ self.project.add_resources([Person, Sport, Group, User])
+ self.project.generate_swagger()
+ self.assertTrue(True) # if there are no exceptions, then it passed
\ No newline at end of file
diff --git a/pfunk/tests/test_resources.py b/pfunk/tests/test_resources.py
index 05a7117..faf2f02 100644
--- a/pfunk/tests/test_resources.py
+++ b/pfunk/tests/test_resources.py
@@ -1,6 +1,8 @@
import unittest
-from pfunk.tests import SimpleIndex
+
from pfunk.client import q
+from pfunk.tests import SimpleIndex
+
class IndexTestCase(unittest.TestCase):
@@ -20,9 +22,9 @@ def test_get_kwargs(self):
self.assertEqual(
self.index.get_kwargs(),
{
- 'name':'simple-index',
+ 'name': 'simple-index',
'source': q.collection('Project'),
'terms': ['name', 'slug'],
'unique': True
}
- )
\ No newline at end of file
+ )
diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py
index fe0fd2e..fbf1f43 100644
--- a/pfunk/tests/test_web_crud.py
+++ b/pfunk/tests/test_web_crud.py
@@ -1,13 +1,12 @@
from werkzeug.test import Client
-from pfunk.tests import User, Group
-from pfunk.exceptions import LoginFailed
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.testcase import APITestCase
from pfunk.tests import House
class TestWebCrud(APITestCase):
- collections = [User, Group, House]
+ collections = [User, Group, UserGroups, House]
def setUp(self) -> None:
super(TestWebCrud, self).setUp()
@@ -17,6 +16,8 @@ def setUp(self) -> None:
groups=[self.group])
self.token, self.exp = User.api_login("test", "abc123")
self.house = House.create(address="test address", user=self.user)
+ self.house_b = House.create(address="test another address", user=self.user)
+ self.house_b = House.create(address="test even another address", user=self.user)
self.app = self.project.wsgi_app
self.c = Client(self.app)
@@ -24,48 +25,66 @@ def test_read(self):
res = self.c.get(f'/house/detail/{self.house.ref.id()}/',
headers={
"Authorization": self.token})
- self.assertTrue(res.json['success'])
- self.assertEqual("test address", res.json['data']['data']['address'])
+
+ self.assertIn("test address", res.text)
+ self.assertTrue(res.status_code, 200)
def test_read_all(self):
res = self.c.get(f'/house/list/',
headers={
"Authorization": self.token})
- self.assertTrue(res.json['success'])
+ self.assertTrue(res.status_code, 200)
+ self.assertIn("test address", str(res.text))
+ self.assertIn("test another address", str(res.text))
+ self.assertIn("test even another address", str(res.text))
def test_create(self):
self.assertNotIn("the street somewhere", [
house.address for house in House.all()])
res = self.c.post('/house/create/',
- json={
+ data={
"address": "the street somewhere",
"user": self.user.ref.id()},
headers={
"Authorization": self.token})
- self.assertTrue(res.json['success'])
self.assertIn("the street somewhere", [
- house.address for house in House.all()])
+ house.address for house in House.all()])
+ self.assertEqual(res.status_code, 302)
+ self.assertEqual(res.location, "/house/list/")
+ self.assertTrue(res.status_code, 200)
def test_update(self):
self.assertNotIn("the updated street somewhere", [
house.address for house in House.all()])
- res = self.c.put(f'/house/update/{self.house.ref.id()}/',
- json={
- "address": "the updated street somewhere",
- "user": self.user.ref.id()},
- headers={
- "Authorization": self.token})
-
- self.assertTrue(res.json['success'])
+ res = self.c.post(f'/house/update/{self.house.ref.id()}/',
+ data={
+ "address": "the updated street somewhere",
+ "user": self.user.ref.id()},
+ headers={
+ "Authorization": self.token})
self.assertIn("the updated street somewhere", [
- house.address for house in House.all()])
-
+ house.address for house in House.all()])
+ self.assertTrue(res.status_code, 200)
def test_delete(self):
- res = self.c.delete(f'/house/delete/{self.house.ref.id()}/',
+ self.assertIn("test address", [
+ house.address for house in House.all()])
+ res = self.c.get(f'/house/delete/{self.house.ref.id()}/',
headers={
"Authorization": self.token,
"Content-Type": "application/json"
})
+ self.assertEqual(res.status_code, 200)
+ self.assertIn("Delete test address", str(res.text))
+ res = self.c.post(f'/house/delete/{self.house.ref.id()}/',
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+ self.assertEqual(res.status_code, 302)
+ self.assertEqual(res.location, "/house/list/")
+ self.assertNotIn("the address", [
+ house.address for house in House.all()])
+
+
- self.assertTrue(res.json['success'])
diff --git a/pfunk/tests/test_web_custom_user_group_group_perms.py b/pfunk/tests/test_web_custom_user_group_group_perms.py
new file mode 100644
index 0000000..cf0f057
--- /dev/null
+++ b/pfunk/tests/test_web_custom_user_group_group_perms.py
@@ -0,0 +1,114 @@
+from valley.utils import import_util
+
+from pfunk.contrib.auth.key import PermissionGroup
+from pfunk.testcase import APITestCase
+from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug
+from pfunk import Collection, StringField, ReferenceField, ManyToManyField
+from pfunk.fields import ManyToManyField, StringField
+from pfunk.contrib.auth.resources import GenericGroupBasedRole
+
+
+class UserGroups(ug):
+ userID = ReferenceField(
+ 'pfunk.tests.test_web_custom_user_group_group_perms.Newuser')
+ groupID = ReferenceField(
+ 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup')
+
+
+class Newgroup(BaseGroup):
+ users = ManyToManyField('pfunk.tests.test_web_custom_user_group_group_perms.Newuser',
+ relation_name='custom_users_groups')
+
+
+class Blog(Collection):
+ user_collection = 'Newuser'
+ group_collection = 'Newgroup'
+ user_collection_dir = 'pfunk.tests.test_web_custom_user_group_group_perms.Newuser'
+ group_collection_dir = 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup'
+ collection_roles = [GenericGroupBasedRole]
+ title = StringField(required=True)
+ content = StringField(required=True)
+ group = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup',
+ relation_name='newgroup_blogs')
+
+ def __unicode__(self):
+ return self.title
+
+
+class Newuser(ExtendedUser):
+ group_collection = 'Newgroup'
+ user_group_class = import_util(
+ 'pfunk.tests.test_web_custom_user_group_group_perms.UserGroups')
+ group_class = import_util(
+ 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup')
+ groups = ManyToManyField(
+ 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups')
+
+
+class TestCustomGroupBasedPerms(APITestCase):
+ collections = [Newuser, Newgroup, Blog, UserGroups]
+
+ def setUp(self) -> None:
+ super().setUp()
+ self.group = Newgroup.create(name='Power Users', slug='power-users')
+ self.user = Newuser.create(username='test_user', email='tlasso@example.org', first_name='Ted',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ perms = self.user.add_permissions(
+ self.group, [PermissionGroup(Blog, ['create', 'read', 'write', 'delete'])])
+ self.token, self.exp = Newuser.api_login("test_user", "abc123")
+ self.raw_token = Newuser.login("test_user", "abc123")
+ self.blog = Blog.create(
+ title='test_blog', content='test content', group=self.group)
+
+ def test_read(self):
+ res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/',
+ headers={
+ "Authorization": self.token})
+ self.assertTrue(res.status_code, 200)
+ self.assertEqual("test_blog", res.json['data']['data']['title'])
+
+ def test_read_all(self):
+ res = self.c.get(f'/json/blog/list/',
+ headers={
+ "Authorization": self.token})
+ self.assertTrue(res.status_code, 200)
+
+ def test_create(self):
+ self.assertNotIn("new blog", [
+ blog.title for blog in Blog.all()])
+ res = self.c.post('/json/blog/create/',
+ json={
+ "title": "new blog",
+ "content": "I created a new blog.",
+ "group": self.group.ref.id()},
+ headers={
+ "Authorization": self.token})
+ self.assertTrue(res.status_code, 200)
+ self.assertIn("new blog", [
+ blog.title for blog in Blog.all()])
+
+ def test_update(self):
+ self.assertNotIn("the updated street somewhere", [
+ house.address for house in Blog.all()])
+ res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/',
+ json={
+ "title": "updated blog",
+ "content": "I updated my blog."},
+ headers={
+ "Authorization": self.token})
+
+ self.assertTrue(res.status_code, 200)
+ self.assertIn("updated blog", [
+ blog.title for blog in Blog.all()])
+
+ def test_delete(self):
+ res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/',
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.status_code, 200)
+ self.assertNotIn("test_blog", [
+ blog.title for blog in Blog.all()])
diff --git a/pfunk/tests/test_web_custom_user_group_m2m.py b/pfunk/tests/test_web_custom_user_group_m2m.py
new file mode 100644
index 0000000..8e04044
--- /dev/null
+++ b/pfunk/tests/test_web_custom_user_group_m2m.py
@@ -0,0 +1,122 @@
+# test_web_custom_user_group_m2m.py - tests custom User/Group collections wired through a many-to-many UserGroups relation
+
+import os
+from valley.utils import import_util
+
+from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug
+from pfunk.testcase import APITestCase
+from pfunk import Collection, StringField, ReferenceField, ManyToManyField
+from pfunk.fields import ManyToManyField, StringField
+from pfunk.contrib.auth.resources import GenericUserBasedRoleM2M
+
+
+class UserGroups(ug):
+ userID = ReferenceField(
+ 'pfunk.tests.test_web_custom_user_group_m2m.Newuser')
+ groupID = ReferenceField(
+ 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup')
+
+
+class Newgroup(BaseGroup):
+ users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser',
+ relation_name='custom_users_groups')
+
+
+class Newuser(ExtendedUser):
+ group_collection = 'Newgroup'
+ user_group_class = import_util(
+ 'pfunk.tests.test_web_custom_user_group_m2m.UserGroups')
+ group_class = import_util(
+ 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup')
+ groups = ManyToManyField(
+ 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups')
+ blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Blog',
+ relation_name='users_blogs')
+
+
+class Blog(Collection):
+ user_collection = 'Newuser'
+ group_collection = 'Newgroup'
+ user_collection_dir = 'pfunk.tests.test_web_custom_user_group_m2m.Newuser'
+ group_collection_dir = 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup'
+ collection_roles = [GenericUserBasedRoleM2M]
+ title = StringField(required=True)
+ content = StringField(required=True)
+ users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser',
+ relation_name='users_blogs')
+
+ def __unicode__(self):
+ return self.title
+
+
+# Test case to see if user-group is working
+class TestCustomUserM2M(APITestCase):
+ collections = [Newuser, Newgroup, UserGroups, Blog]
+
+ def setUp(self) -> None:
+ super().setUp()
+ self.group = Newgroup.create(name='Power Users', slug='power-users')
+ self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ self.user2 = Newuser.create(username='test2', email='tlasso2@example.org', first_name='Juliuz',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ self.blog = Blog.create(
+ title='test_blog', content='test content', users=[self.user], token=self.secret)
+ self.token, self.exp = Newuser.api_login("test", "abc123")
+
+ def test_read(self):
+ res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/',
+ headers={
+ "Authorization": self.token})
+        self.assertEqual(res.status_code, 200)
+ self.assertEqual("test_blog", res.json['data']['data']['title'])
+
+ def test_read_all(self):
+ res = self.c.get(f'/json/blog/list/',
+ headers={
+ "Authorization": self.token})
+        self.assertEqual(res.status_code, 200)
+
+ def test_create(self):
+ self.assertNotIn("new blog", [
+ blog.title for blog in Blog.all()])
+ res = self.c.post('/json/blog/create/',
+ json={
+ "title": "new blog",
+ "content": "I created a new blog.",
+ "users": [self.user.ref.id(), self.user2.ref.id()]},
+ headers={
+ "Authorization": self.token})
+
+        self.assertEqual(res.status_code, 200)
+ self.assertIn("new blog", [
+ blog.title for blog in Blog.all()])
+
+ def test_update(self):
+ self.assertNotIn("updated blog", [
+ blog.title for blog in Blog.all()])
+ res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/',
+ json={
+ "title": "updated blog",
+ "content": "I updated my blog.",
+ "users": [self.user.ref.id()]
+ },
+ headers={
+ "Authorization": self.token})
+
+        self.assertEqual(res.status_code, 200)
+ self.assertIn("updated blog", [
+ blog.title for blog in Blog.all()])
+
+ def test_delete(self):
+ res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/',
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+        self.assertEqual(res.status_code, 200)
+ self.assertNotIn("test_blog", [
+ blog.title for blog in Blog.all()])
diff --git a/pfunk/tests/test_web_custom_user_group_users_perms.py b/pfunk/tests/test_web_custom_user_group_users_perms.py
new file mode 100644
index 0000000..7dcdcb4
--- /dev/null
+++ b/pfunk/tests/test_web_custom_user_group_users_perms.py
@@ -0,0 +1,119 @@
+# test_web_custom_user_group_users_perms.py - tests custom User/Group collections with user-based (per-user) permissions
+
+import os
+from valley.utils import import_util
+from pprint import pprint as p
+
+from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug
+from pfunk.testcase import APITestCase
+from pfunk import Collection, StringField, ReferenceField, ManyToManyField
+from pfunk.fields import ManyToManyField, StringField
+from pfunk.contrib.auth.resources import GenericUserBasedRole
+
+
+class UserGroups(ug):
+ userID = ReferenceField(
+ 'pfunk.tests.test_web_custom_user_group_users_perms.Newuser')
+ groupID = ReferenceField(
+ 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup')
+
+
+class Newgroup(BaseGroup):
+ users = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser',
+ relation_name='custom_users_groups')
+
+
+class Newuser(ExtendedUser):
+ group_collection = 'Newgroup'
+ user_group_class = import_util(
+ 'pfunk.tests.test_web_custom_user_group_users_perms.UserGroups')
+ group_class = import_util(
+ 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup')
+ groups = ManyToManyField(
+ 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups')
+ blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Blog',
+ relation_name='users_blogs')
+
+
+class Blog(Collection):
+ user_collection = 'Newuser'
+ group_collection = 'Newgroup'
+ user_collection_dir = 'pfunk.tests.test_web_custom_user_group_users_perms.Newuser'
+ group_collection_dir = 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup'
+ collection_roles = [GenericUserBasedRole]
+ title = StringField(required=True)
+ content = StringField(required=True)
+ user = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser',
+ relation_name='users_blogs')
+
+ def __unicode__(self):
+ return self.title
+
+
+# Test case to see if user-group is working
+class TestCustomUserBasedPerms(APITestCase):
+ collections = [Newuser, Newgroup, UserGroups, Blog]
+
+ def setUp(self) -> None:
+ super().setUp()
+ self.group = Newgroup.create(name='Power Users', slug='power-users')
+ self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ self.blog = Blog.create(
+ title='test_blog', content='test content', user=self.user, token=self.secret)
+ self.token, self.exp = Newuser.api_login("test", "abc123")
+
+ def test_read(self):
+ res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/',
+ headers={
+ "Authorization": self.token})
+        self.assertEqual(res.status_code, 200)
+ self.assertEqual("test_blog", res.json['data']['data']['title'])
+
+ def test_read_all(self):
+ res = self.c.get(f'/json/blog/list/',
+ headers={
+ "Authorization": self.token})
+        self.assertEqual(res.status_code, 200)
+
+ def test_create(self):
+ self.assertNotIn("new blog", [
+ blog.title for blog in Blog.all()])
+ res = self.c.post('/json/blog/create/',
+ json={
+ "title": "new blog",
+ "content": "I created a new blog.",
+ "user": self.user.ref.id()},
+ headers={
+ "Authorization": self.token})
+
+        self.assertEqual(res.status_code, 200)
+ self.assertIn("new blog", [
+ blog.title for blog in Blog.all()])
+
+ def test_update(self):
+ self.assertNotIn("updated blog", [
+ blog.title for blog in Blog.all()])
+ res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/',
+ json={
+ "title": "updated blog",
+ "content": "I updated my blog.",
+ "user": self.user.ref.id()},
+ headers={
+ "Authorization": self.token})
+
+        self.assertEqual(res.status_code, 200)
+ self.assertIn("updated blog", [
+ blog.title for blog in Blog.all()])
+
+ def test_delete(self):
+ res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/',
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+        self.assertEqual(res.status_code, 200)
+ self.assertNotIn("test_blog", [
+ blog.title for blog in Blog.all()])
diff --git a/pfunk/tests/test_web_change_password.py b/pfunk/tests/test_web_json_change_password.py
similarity index 94%
rename from pfunk/tests/test_web_change_password.py
rename to pfunk/tests/test_web_json_change_password.py
index 85e6fc7..cb8dae0 100644
--- a/pfunk/tests/test_web_change_password.py
+++ b/pfunk/tests/test_web_json_change_password.py
@@ -1,9 +1,9 @@
-from pfunk.tests import User, Group
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.testcase import APITestCase
class TestWebChangePassword(APITestCase):
- collections = [User, Group]
+ collections = [User, Group, UserGroups]
def setUp(self) -> None:
super(TestWebChangePassword, self).setUp()
@@ -25,12 +25,12 @@ def test_update_password(self):
headers={
"Authorization": self.token
})
-
+
new_token, new_exp = User.api_login("test", "updated_password")
self.assertIsNotNone(new_token)
self.assertTrue(res.json['success'])
-
+
def test_update_pass_wrong_current(self):
""" Tests `pfunk.contrib.auth.views.UpdatePasswordView` throw an error if the current password given was wrong """
res = self.c.post('/user/update-password/',
@@ -43,6 +43,6 @@ def test_update_pass_wrong_current(self):
"Authorization": self.token
})
expected = {'success': False, 'data': {'validation_errors': {'current_password': ' Password update failed.'}}}
-
+
self.assertDictEqual(res.json, expected)
self.assertFalse(res.json['success'])
diff --git a/pfunk/tests/test_web_json_crud.py b/pfunk/tests/test_web_json_crud.py
new file mode 100644
index 0000000..6179e41
--- /dev/null
+++ b/pfunk/tests/test_web_json_crud.py
@@ -0,0 +1,70 @@
+from werkzeug.test import Client
+
+from pfunk.contrib.auth.collections import Group, User, UserGroups
+from pfunk.testcase import APITestCase
+from pfunk.tests import House
+
+
+class TestWebCrud(APITestCase):
+ collections = [User, Group, House, UserGroups]
+
+ def setUp(self) -> None:
+ super(TestWebCrud, self).setUp()
+ self.group = Group.create(name='Power Users', slug='power-users')
+ self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ self.token, self.exp = User.api_login("test", "abc123")
+ self.house = House.create(address="test address", user=self.user)
+ self.app = self.project.wsgi_app
+ self.c = Client(self.app)
+
+ def test_read(self):
+ res = self.c.get(f'/json/house/detail/{self.house.ref.id()}/',
+ headers={
+ "Authorization": self.token})
+ self.assertTrue(res.json['success'])
+ self.assertEqual("test address", res.json['data']['data']['address'])
+
+ def test_read_all(self):
+ res = self.c.get(f'/json/house/list/',
+ headers={
+ "Authorization": self.token})
+ self.assertTrue(res.json['success'])
+
+ def test_create(self):
+ self.assertNotIn("the street somewhere", [
+ house.address for house in House.all()])
+ res = self.c.post('/json/house/create/',
+ json={
+ "address": "the street somewhere",
+ "user": self.user.ref.id()},
+ headers={
+ "Authorization": self.token})
+
+ self.assertTrue(res.json['success'])
+ self.assertIn("the street somewhere", [
+ house.address for house in House.all()])
+
+ def test_update(self):
+ self.assertNotIn("the updated street somewhere", [
+ house.address for house in House.all()])
+ res = self.c.put(f'/json/house/update/{self.house.ref.id()}/',
+ json={
+ "address": "the updated street somewhere",
+ "user": self.user.ref.id()},
+ headers={
+ "Authorization": self.token})
+
+ self.assertTrue(res.json['success'])
+ self.assertIn("the updated street somewhere", [
+ house.address for house in House.all()])
+
+ def test_delete(self):
+ res = self.c.delete(f'/json/house/delete/{self.house.ref.id()}/',
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
diff --git a/pfunk/tests/test_web_forgot_password.py b/pfunk/tests/test_web_json_forgot_password.py
similarity index 74%
rename from pfunk/tests/test_web_forgot_password.py
rename to pfunk/tests/test_web_json_forgot_password.py
index 4b81492..6dbdaa2 100644
--- a/pfunk/tests/test_web_forgot_password.py
+++ b/pfunk/tests/test_web_json_forgot_password.py
@@ -1,11 +1,11 @@
from werkzeug.test import Client
-from pfunk.tests import User, Group
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.testcase import APITestCase
class TestWebForgotPassword(APITestCase):
- collections = [User, Group]
+ collections = [User, Group, UserGroups]
def setUp(self) -> None:
super(TestWebForgotPassword, self).setUp()
@@ -26,16 +26,16 @@ def test_send_forgot_req(self):
"Content-Type": "application/json"})
self.assertTrue(res.json['success'])
-
+
def test_submit_key_for_forgot_pass(self):
""" Submits the key from the forgot password email to initiate password reset """
-
+
res = self.c.put(f'/user/forgot-password/',
- json={
- "verification_key": self.key,
- "password": "new_updated_pass"},
- headers={
- "Content-Type": "application/json"})
+ json={
+ "verification_key": self.key,
+ "password": "new_updated_pass"},
+ headers={
+ "Content-Type": "application/json"})
new_login = User.api_login("test", "new_updated_pass")
self.assertTrue(res.json['success'])
@@ -45,11 +45,11 @@ def test_submit_wrong_key_for_forgot_pass(self):
""" Submit a wrong key for verification of reset password. Should return `Not Found` """
key = 'wrong-key'
res = self.c.put(f'/user/forgot-password/',
- json={
- "verification_key": key,
- "password": "forgotten_password"},
- headers={
- "Content-Type": "application/json"})
+ json={
+ "verification_key": key,
+ "password": "forgotten_password"},
+ headers={
+ "Content-Type": "application/json"})
expected = {'data': 'Not Found', 'success': False}
self.assertFalse(res.json['success'])
diff --git a/pfunk/tests/test_web_login.py b/pfunk/tests/test_web_json_login.py
similarity index 94%
rename from pfunk/tests/test_web_login.py
rename to pfunk/tests/test_web_json_login.py
index b08cc27..862b7d0 100644
--- a/pfunk/tests/test_web_login.py
+++ b/pfunk/tests/test_web_json_login.py
@@ -1,12 +1,12 @@
from werkzeug.test import Client
-from pfunk.tests import User, Group
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.exceptions import LoginFailed
from pfunk.testcase import APITestCase
class TestWebLogin(APITestCase):
- collections = [User, Group]
+ collections = [User, Group, UserGroups]
def setUp(self) -> None:
super(TestWebLogin, self).setUp()
@@ -24,6 +24,7 @@ def test_login(self):
# check if response has cookies
self.assertIsNotNone(res.headers['Set-Cookie'])
+
self.assertTrue(res.json['success'])
def test_wrong_login(self):
@@ -36,11 +37,11 @@ def test_wrong_login(self):
def test_logout(self):
""" Tests `pfunk.contrib.auth.views.LogoutView` invalidate token login and remove cookie """
token, exp = User.api_login("test", "abc123")
+
res = self.c.post('/user/logout/', headers={
"Authorization": token,
"Content-Type": "application/json"
})
-
self.assertTrue(res.json['success'])
def test_wrong_logout(self):
diff --git a/pfunk/tests/test_web_signup.py b/pfunk/tests/test_web_json_signup.py
similarity index 85%
rename from pfunk/tests/test_web_signup.py
rename to pfunk/tests/test_web_json_signup.py
index f1c5fa4..0964414 100644
--- a/pfunk/tests/test_web_signup.py
+++ b/pfunk/tests/test_web_json_signup.py
@@ -1,11 +1,11 @@
from werkzeug.test import Client
-from pfunk.tests import User, Group
+from pfunk.contrib.auth.collections import Group, User, UserGroups
from pfunk.testcase import APITestCase
class TestWebSignup(APITestCase):
- collections = [User, Group]
+ collections = [User, Group, UserGroups]
def setUp(self) -> None:
super(TestWebSignup, self).setUp()
@@ -27,7 +27,7 @@ def test_signup(self):
"email": "testemail@email.com",
"first_name": "Forest",
"last_name": "Gump",
- "_credential_field": "password"
+ "_credential_field": "password"
})
# token = User.login(username="new_user", password="password")
@@ -41,9 +41,9 @@ def test_signup_not_unique(self):
"email": "testemail@email.com",
"first_name": "Forest",
"last_name": "Gump",
- "_credential_field": "password"
+ "_credential_field": "password"
})
self.assertFalse(res.json['success'])
self.assertEqual(res.status_code, 400)
- self.assertEqual('document is not unique.', res.json['data'])
+ self.assertEqual('User document is not unique.', res.json['data'])
diff --git a/pfunk/tests/test_web_json_stripe.py b/pfunk/tests/test_web_json_stripe.py
new file mode 100644
index 0000000..1b7dcc5
--- /dev/null
+++ b/pfunk/tests/test_web_json_stripe.py
@@ -0,0 +1,289 @@
+import tempfile
+from types import SimpleNamespace
+from unittest import mock
+
+from werkzeug.test import Client
+
+from pfunk.contrib.auth.collections import Group, User, UserGroups
+from pfunk.contrib.auth.key import PermissionGroup
+from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer
+from pfunk.contrib.ecommerce.views import BaseWebhookView
+from pfunk.testcase import APITestCase
+from pfunk.web.request import HTTPRequest
+
+
+class TestWebStripeCrud(APITestCase):
+ collections = [User, Group, UserGroups, StripePackage, StripeCustomer]
+
+ def setUp(self) -> None:
+ super(TestWebStripeCrud, self).setUp()
+ self.group = Group.create(name='Power Users', slug='power-users')
+ self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ self.stripe_pkg = StripePackage.create(group=self.group,
+ stripe_id='100', price='10', description='unit testing...',
+ name='unit test package')
+ self.stripe_cus = StripeCustomer.create(
+ user=self.user, stripe_id='100')
+
+ self.token, self.exp = User.api_login("test", "abc123")
+ self.app = self.project.wsgi_app
+ self.c = Client(self.app)
+ self.user.add_permissions(self.group, [PermissionGroup(
+ StripePackage, ['create', 'read', 'write', 'delete'])])
+
+ def test_list_package(self):
+ res = self.c.get('/json/stripepackage/list/', headers={
+ "Content-Type": "application/json"
+ })
+ self.assertTrue(res.json['success'])
+ self.assertEqual(
+ res.json['data']['data'][0]['data']['name'],
+ self.stripe_pkg.name)
+
+ def test_get_package(self):
+ res = self.c.get(f'/json/stripepackage/detail/{self.stripe_pkg.ref.id()}/', headers={
+ "Content-Type": "application/json"
+ })
+ self.assertTrue(res.json['success'])
+ self.assertEqual(
+ res.json['data']['data']['name'],
+ self.stripe_pkg.name)
+
+ def test_create_package(self):
+ self.assertNotIn("new stripe pkg", [
+ pkg.name for pkg in StripePackage.all()])
+ res = self.c.post('/json/stripepackage/create/',
+ json={
+ 'stripe_id': '123',
+ 'name': 'new stripe pkg',
+ 'price': 10.10,
+ 'description': 'a test package',
+ 'group': self.group.ref.id()
+ },
+ headers={
+ "Authorization": self.token
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertIn("new stripe pkg", [
+ pkg.name for pkg in StripePackage.all()])
+
+ def test_update_package(self):
+ self.assertNotIn("updated pkg", [
+ pkg.name for pkg in StripePackage.all()])
+ updated_name = 'updated pkg'
+ res = self.c.put(f'/json/stripepackage/update/{self.stripe_pkg.ref.id()}/',
+ json={
+ 'stripe_id': '123',
+ 'name': updated_name,
+ 'price': 10.10,
+ 'description': 'a test package'
+ },
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertEqual(
+ res.json['data']['data']['name'],
+ updated_name)
+
+ def test_delete_package(self):
+ res = self.c.delete(f'/json/stripepackage/delete/{self.stripe_pkg.ref.id()}/',
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertNotIn(
+ self.stripe_pkg.ref.id(),
+ [pkg.ref.id() for pkg in StripePackage.all()]
+ )
+
+ def test_create_customer(self):
+ stripe_id = '201'
+ self.assertNotIn(stripe_id, [
+ cus.stripe_id for cus in StripeCustomer.all()])
+ res = self.c.post(f'/json/stripecustomer/create/',
+ json={
+ "user": self.user.ref.id(),
+ "stripe_id": stripe_id
+ },
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertIn(stripe_id, [
+ cus.stripe_id for cus in StripeCustomer.all()])
+
+ def test_list_customers(self):
+ res = self.c.get('/json/stripecustomer/list/', headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertEqual(
+ res.json['data']['data'][0]['data']['stripe_id'],
+ '100')
+
+ def test_get_customer(self):
+ res = self.c.get(f'/json/stripecustomer/detail/{self.stripe_cus.ref.id()}/', headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertEqual(
+ res.json['data']['data']['stripe_id'],
+ '100')
+
+ def test_update_customer(self):
+ updated_stripe_id = '101'
+ self.assertNotIn(updated_stripe_id, [
+ cus.stripe_id for cus in StripeCustomer.all()])
+ res = self.c.put(f'/json/stripecustomer/update/{self.stripe_cus.ref.id()}/',
+ json={
+ "stripe_id": updated_stripe_id
+ },
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertEqual(
+ res.json['data']['data']['stripe_id'],
+ updated_stripe_id)
+
+ def test_delete_customer(self):
+ res = self.c.delete(f'/json/stripecustomer/delete/{self.stripe_cus.ref.id()}/',
+ headers={
+ "Authorization": self.token,
+ "Content-Type": "application/json"
+ })
+
+ self.assertTrue(res.json['success'])
+ self.assertNotIn(
+ self.stripe_cus.ref.id(),
+ [cus.ref.id() for cus in StripeCustomer.all()]
+ )
+
+
+class TestStripeWebhook(APITestCase):
+ collections = [User, Group, UserGroups, StripeCustomer]
+
+ def setUp(self) -> None:
+ super(TestStripeWebhook, self).setUp()
+ self.group = Group.create(name='Power Users', slug='power-users')
+ self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ self.token, self.exp = User.api_login("test", "abc123")
+ self.app = self.project.wsgi_app
+ self.view = BaseWebhookView()
+ self.stripe_req_body = {
+ "id": "evt_1CiPtv2eZvKYlo2CcUZsDcO6",
+ "object": "event",
+ "api_version": "2018-05-21",
+ "created": 1530291411,
+ "data": {
+ "object": {}
+ },
+ "livemode": False,
+ "pending_webhooks": 0,
+ "request": {
+ "id": None,
+ "idempotency_key": None
+ },
+ "type": "source.chargeable"
+ }
+ headers = {'HTTP_STRIPE_SIGNATURE': 'sig_112233'}
+ event = {
+ 'body': self.stripe_req_body,
+ 'requestContext': {
+ 'web': {
+ 'method': 'post',
+ 'path': '/webhook',
+ 'source_ip': '192.168.1.30'
+ }
+ },
+ 'headers': headers
+ }
+ self.view.request = HTTPRequest(event=event)
+ self.c = Client(self.app)
+
+ def test_event_action(self):
+ # event_dict = {'type': 'checkout.session.completed'}
+ with self.assertRaises(NotImplementedError):
+ self.view.event = SimpleNamespace(**self.view.request.body)
+ res = self.view.event_action()
+
+ def test_check_ip(self):
+ res = self.view.check_ip()
+ self.assertFalse(res)
+
+ @mock.patch('boto3.client')
+ def test_send_html_email(self, mocked):
+ # Requires to have `TEMPLATE_ROOT_DIR=/` in your .env file
+ res = self.view.send_html_email(
+ subject='Test Subject',
+ from_email='unittesting@email.com',
+ to_email_list=['recipient@email.com'],
+ template_name='/code/pfunk/tests/templates/email/email_template.html'
+ )
+ self.assertTrue(True) # if there are no exceptions, then it passed
+
+ @mock.patch('stripe.Webhook')
+ def test_check_signing_secret(self, mocked):
+ res = self.view.check_signing_secret()
+ self.assertTrue(True) # if there are no exceptions, then it passed
+
+ def test_get_transfer_data(self):
+ self.view.event_json = self.view.request.body
+ res = self.view.get_transfer_data()
+ self.assertTrue(True)
+
+ @mock.patch('stripe.Webhook')
+ def test_receive_post_req(self, mocked):
+ with self.assertRaises(NotImplementedError):
+ self.view.event = SimpleNamespace(**self.view.request.body)
+ res = self.c.post('/json/stripecustomer/webhook/',
+ json=self.stripe_req_body,
+ headers={
+ 'HTTP_STRIPE_SIGNATURE': 'sig_1113'
+ })
+
+
+class TestStripeCheckoutView(APITestCase):
+ collections = [User, Group, UserGroups, StripePackage]
+
+ def setUp(self) -> None:
+ super(TestStripeCheckoutView, self).setUp()
+ self.group = Group.create(name='Power Users', slug='power-users')
+ self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
+ last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
+ groups=[self.group])
+ self.token, self.exp = User.api_login("test", "abc123")
+ self.stripe_pkg = StripePackage.create(group=self.group,
+ stripe_id='100', price='10', description='unit testing...',
+ name='unit test package')
+ self.app = self.project.wsgi_app
+ self.c = Client(self.app)
+
+ @mock.patch('stripe.checkout', spec=True)
+ def test_checkout_success_view(self, mocked):
+ session_id = 'session_123'
+ res = self.c.get(f'/json/stripepackage/checkout-success/{session_id}/', headers={
+ 'Authorization': self.token,
+ 'Content-Type': 'application/json'
+ })
+
+ self.assertTrue(True)
+ self.assertDictEqual({'success': False, 'data': 'Not Found'}, res.json)
diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py
deleted file mode 100644
index ab20d42..0000000
--- a/pfunk/tests/test_web_stripe.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from werkzeug.test import Client
-
-from pfunk.tests import User, Group
-from pfunk.contrib.auth.collections import PermissionGroup
-from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer
-from pfunk.testcase import APITestCase
-
-
-class TestWebStripe(APITestCase):
- # TODO: Add `StripeCustomer`
- collections = [User, Group, StripePackage]
-
- def setUp(self) -> None:
- super(TestWebStripe, self).setUp()
- self.group = Group.create(name='Power Users', slug='power-users')
- self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
- last_name='Lasso', _credentials='abc123', account_status='ACTIVE',
- groups=[self.group])
- self.stripe_pkg = StripePackage.create(
- stripe_id='100', price='10', description='unit testing...', name='unit test package')
- # self.stripe_customer = StripeCustomer.create(user=self.user, customer_id='100', package=self.stripe_pkg)
-
- self.token, self.exp = User.api_login("test", "abc123")
- self.app = self.project.wsgi_app
- self.c = Client(self.app)
- # self.user.add_permissions(self.group, [PermissionGroup(StripePackage, ['create', 'read', 'write', 'delete'])])
-
- def test_list_package(self):
- res = self.c.get('/stripepackage/list/', headers={
- "Content-Type": "application/json"
- })
- self.assertTrue(res.json['success'])
- self.assertEqual(
- res.json['data']['data'][0]['data']['name'],
- self.stripe_pkg.name)
-
- def test_get_package(self):
- res = self.c.get(f'/stripepackage/detail/{self.stripe_pkg.ref.id()}/', headers={
- "Content-Type": "application/json"
- })
- self.assertTrue(res.json['success'])
- self.assertEqual(
- res.json['data']['data']['name'],
- self.stripe_pkg.name)
-
- # TODO: Fix `forbidden` error in stripe views
- def test_create_package(self):
- res = self.c.post('/stripepackage/create/',
- json={
- 'stripe_id': '123',
- 'name': 'stripe_pkg',
- 'price': 10.10,
- 'description': 'a test package'
- },
- headers={
- "Authorization": self.token,
- "Content-Type": "application/json"
- })
-
-
-
- # TODO: Fix `forbidden` error in stripe views
- def test_update_package(self):
- res = self.c.put(f'/stripepackage/update/{self.stripe_pkg.ref.id()}/',
- json={
- 'stripe_id': '123',
- 'name': 'stripe_pkg',
- 'price': 10.10,
- 'description': 'a test package'
- },
- headers={
- "Authorization": self.token,
- "Content-Type": "application/json"
- })
-
-
-
- # TODO: Fix `forbidden` error in stripe views
- def test_delete_package(self):
- res = self.c.delete(f'/stripepackage/delete/{self.stripe_pkg.ref.id()}/',
- headers={
- "Authorization": self.token,
- "Content-Type": "application/json"
- })
-
-
\ No newline at end of file
diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py
new file mode 100644
index 0000000..93836bc
--- /dev/null
+++ b/pfunk/utils/aws.py
@@ -0,0 +1,170 @@
+import datetime
+import boto3
+import json
+import swaggyp as sw
+from botocore.exceptions import ClientError, NoCredentialsError
+from envs import env
+from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator
+from openapi_spec_validator.readers import read_from_filename
+from openapi_spec_validator.exceptions import OpenAPIValidationError
+
+AWS_ACCESS_KEY = env('AWS_ACCESS_KEY')
+AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY')
+AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION')
+
+
+def _json_dt_helper(o):
+ """ Helps serializing `datetime` objects to a readable string """
+ if isinstance(o, (datetime.date, datetime.datetime)):
+ return o.isoformat()
+
+
+def write_to_config(obj, config_file_dir='pfunk.json'):
+ """ Appends object to pfunk config file
+
+ Args:
+ obj (dict, required):
+ key, value pairs to write to json file
+ config_file_dir (str, optional):
+ directory of the config json file, default='pfunk.json'
+ Returns:
+ config_file (dict, required):
+ the current value of config file (pfunk.json)
+ """
+ with open(config_file_dir, 'r+') as f:
+ data = json.load(f)
+ data.update(obj)
+ f.seek(0)
+ f.truncate()
+ json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper)
+ return data
+
+
+def read_from_config_file(config_file_dir='pfunk.json'):
+ """ Returns data from config file in dict form """
+ with open(config_file_dir, 'r') as f:
+ data = json.load(f)
+ return data
+
+
+class ApiGateway(object):
+ region_name = env('SES_REGION_NAME', 'us-east-1')
+
+ def __init__(self):
+ self.client = boto3.client(
+ 'apigateway',
+ aws_access_key_id=AWS_ACCESS_KEY,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ region_name=AWS_DEFAULT_REGION)
+
+ def validate_yaml(self, yaml_file):
+ """ Validate YAML file if it is valid for using OpenAPI Spec v2"""
+ try:
+ spec_dict, spec_url = read_from_filename(yaml_file)
+ validate_v2_spec(spec_dict)
+ except (OSError, AttributeError) as err:
+ return {'errors': str(err)}
+ except OpenAPIValidationError as err:
+ return self._iterate_validator_errors(spec_dict)
+ return None
+
+ def _iterate_validator_errors(self, spec_dict):
+ """ Iterates through list of errors that the `openapi_spec_validator` returned
+
+ This method was implemented due to `openapi_spec_validator` design
+ that if an error happened while iterating through the YAML file
+ it returns a Python error.
+ Args:
+ spec_dict (dict, required):
+ `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename`
+ Returns:
+ list of errors
+ """
+ try:
+ errors = [{err.message: err.json_path}
+ for err in openapi_v2_spec_validator.iter_errors(spec_dict)]
+ return errors
+ except (OSError, AttributeError) as err:
+ return str(err)
+
+ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True):
+ """ Creates an API for AWS API Gateway from a YAML swagger file
+
+ Args:
+ yaml_file (yaml file, required):
+ The OpenAPI swagger file to create API from
+ fail_on_warnings (bool, optional):
+ Specifies if the method will error on warnings. Default: `True`
+ """
+ _yaml_valid = self.validate_yaml(yaml_file)
+ if _yaml_valid:
+ return {
+ "error": 'Bad Request. YAML is not valid.',
+ "yaml_err": _yaml_valid
+ }
+
+ try:
+ if not type(yaml_file) == 'string':
+ with open(yaml_file, 'r') as file:
+ yaml_file = file.read()
+ response = self.client.import_rest_api(
+ failOnWarnings=fail_on_warnings,
+ body=yaml_file)
+ if response:
+ write_to_config({'api': response})
+ return {
+ 'success': True,
+ 'response': response
+ }
+ except (ClientError, NoCredentialsError) as err:
+ return {
+ 'error': str(err)
+ }
+
+ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True):
+ """ Updates rest API using yaml file
+
+ Args:
+ rest_api_id (string, required):
+ ID of the API for updating, if not provided, use API ID from `pfunk.json`
+ yaml_file (yaml file, required):
+ The OpenAPI swagger file to create API from
+ mode (string, required):
+ Mode of update, choice=['merge', 'overwrite']
+ fail_on_warnings (bool, optional):
+ Specifies if the method will error on warnings. Default: `True`
+ """
+ _yaml_valid = self.validate_yaml(yaml_file)
+ if _yaml_valid:
+ return {
+ "error": 'Bad Request. YAML is not valid.',
+ "yaml_err": _yaml_valid
+ }
+
+ try:
+ if not type(yaml_file) == 'string':
+ with open(yaml_file, 'r') as file:
+ yaml_file = file.read()
+ # Acquire REST API ID from config file if not provided
+ if not rest_api_id:
+ data = read_from_config_file()
+ if data.get('api'):
+ rest_api_id = (data.get('api')
+ .get('id'))
+
+ response = self.client.put_rest_api(
+ restApiId=rest_api_id,
+ mode=mode,
+ failOnWarnings=fail_on_warnings,
+ body=yaml_file
+ )
+
+ if response:
+ return {
+ 'success': True,
+ 'response': response
+ }
+ except (ClientError, NoCredentialsError) as err:
+ return {
+ 'error': str(err)
+ }
\ No newline at end of file
diff --git a/pfunk/utils/deploy.py b/pfunk/utils/deploy.py
index f07ec12..a78cae2 100644
--- a/pfunk/utils/deploy.py
+++ b/pfunk/utils/deploy.py
@@ -1,9 +1,10 @@
-import boto3
import datetime
import json
import os
-import pip
import shutil
+
+import boto3
+import pip
import sammy as sm
s3 = boto3.client('s3')
diff --git a/pfunk/utils/json_utils.py b/pfunk/utils/json_utils.py
index ee7342f..15de3a0 100644
--- a/pfunk/utils/json_utils.py
+++ b/pfunk/utils/json_utils.py
@@ -20,4 +20,4 @@ def default(self, obj):
try:
return super(PFunkEncoder, self).default(obj)
except AttributeError:
- return str(obj)
\ No newline at end of file
+ return str(obj)
diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py
index 4d08373..f98efe4 100644
--- a/pfunk/utils/publishing.py
+++ b/pfunk/utils/publishing.py
@@ -1,3 +1,5 @@
+import logging
+
import requests
from faunadb import query as q
from faunadb.errors import BadRequest
@@ -7,6 +9,7 @@ class BearerAuth(requests.auth.AuthBase):
"""
Bearer Token Auth class for the requests library.
"""
+
def __init__(self, token):
"""
@@ -19,7 +22,8 @@ def __call__(self, r):
r.headers["authorization"] = "Bearer " + self.token
return r
-def create_or_update_role(client, payload:dict={}):
+
+def create_or_update_role(client, payload: dict = {}):
"""
Utility that attempts to create a role and if that fails it attempts to update it.
Args:
@@ -34,7 +38,6 @@ def create_or_update_role(client, payload:dict={}):
q.create_role(payload)
)
except BadRequest as err:
-
payload_copy = payload.copy()
role_name = payload_copy.pop("name")
@@ -58,13 +61,12 @@ def create_or_pass_index(client, payload):
Returns: query
"""
+
try:
response = client.query(
q.create_index(payload)
)
except BadRequest as err:
- print('Warning: You cannot update an index please delete the index and publish it again.')
- print(err)
return
return response
@@ -96,4 +98,4 @@ def create_or_update_function(client, payload):
)
)
- return response
\ No newline at end of file
+ return response
diff --git a/pfunk/utils/routing.py b/pfunk/utils/routing.py
new file mode 100644
index 0000000..2dc92b8
--- /dev/null
+++ b/pfunk/utils/routing.py
@@ -0,0 +1,48 @@
+import typing as t
+import re
+
+_rule_re = re.compile(
+ r"""
+    (?P<static>[^<]*)  # static rule data
+    <
+    (?:
+        (?P<converter>[a-zA-Z_][a-zA-Z0-9_]*)  # converter name
+        (?:\((?P<args>.*?)\))?  # converter arguments
+        \:  # variable delimiter
+    )?
+    (?P<variable>[a-zA-Z_][a-zA-Z0-9_]*)  # variable name
+ >
+ """,
+ re.VERBOSE,
+)
+
+
+def parse_rule(rule: str) -> t.Iterator[t.Tuple[t.Optional[str], t.Optional[str], str]]:
+ """Parse a rule and return it as generator. Each iteration yields tuples
+ in the form ``(converter, arguments, variable)``. If the converter is
+ `None` it's a static url part, otherwise it's a dynamic one.
+ :internal:
+ """
+ pos = 0
+ end = len(rule)
+ do_match = _rule_re.match
+ used_names = set()
+ while pos < end:
+ m = do_match(rule, pos)
+ if m is None:
+ break
+ data = m.groupdict()
+ if data["static"]:
+ yield None, None, data["static"]
+ variable = data["variable"]
+ converter = data["converter"] or "default"
+ if variable in used_names:
+ raise ValueError(f"variable name {variable!r} used twice.")
+ used_names.add(variable)
+ yield converter, data["args"] or None, variable
+ pos = m.end()
+ if pos < end:
+ remaining = rule[pos:]
+ if ">" in remaining or "<" in remaining:
+ raise ValueError(f"malformed url rule: {rule!r}")
+ yield None, None, remaining
\ No newline at end of file
diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py
new file mode 100644
index 0000000..a0f108c
--- /dev/null
+++ b/pfunk/utils/swagger.py
@@ -0,0 +1,327 @@
+import json
+import os
+import re
+import swaggyp as sw
+
+from pfunk.web.views.html import HTMLView
+from pfunk.collection import Collection
+from pfunk.utils.routing import parse_rule
+
+GRAPHQL_TO_YAML_TYPES = {
+ "String": "string",
+ "Int": "integer",
+ "Float": "integer",
+ "Boolean": "boolean"
+}
+
+PFUNK_TO_YAML_TYPES = {
+ "StringField": "string",
+ "SlugField": "string",
+ "EmailField": "string",
+ "EnumField": "string",
+ "ManyToManyField": "#/definitions/",
+ "ReferenceField": "#/definitions/",
+ "ForeignList": "#/definitions/",
+ "IntegerField": "integer",
+ "FloatField": "integer",
+ "BooleanField": "boolean",
+ "ListField": "array"
+}
+
+WERKZEUG_URL_TO_YAML_TYPES = {
+ "int": "integer",
+ "string": "string",
+ "float": "integer",
+ "path": "string",
+ "uuid": "string"
+}
+
+
+class SwaggerDoc(object):
+
+ def __init__(self, collections, rules=[], config_file='pfunk.json'):
+ """ Generates swagger doc. Details are going to be acquired from the collections
+
+ The acquisition of the information needed for docs are as follows:
+ ```
+ Response:
+ Description (str): View's `get_query` docstrings
+ Status Code (int):
+ Acquired from `response_class` class variable of a view
+ Error status_codes are acquired too in class variables
+ Operation:
+ HTTP Methods (arr): Defined `http_methods` in a view.
+ Summary (str): ({http_method}) -> {collection_name}
+ Description (str): Docstring of the view
+ Path:
+ Endpoint (str): Path of the function. You can see it in `url` method of a view.
+ Model:
+ Name (str): The class name of the `collection`
+ Properties (str): The fields of the collection and their type
+ ```
+
+ Args:
+ collections ([`pfunk.collection.Collection`]):
+ array of collection of the project to generate models from
+ rules ([`werkzeug.routing.Rule`]):
+ array of additional URLs that the given collection doesn't have
+ config_file (str, optional):
+ directory of the config_file
+
+ Returns:
+ swagger.yaml (yaml, required):
+ Generated YAML file
+ """
+ self.collections = collections
+ self.rules = rules
+ self.paths = []
+ self.definitions = []
+ self.responses = []
+ self.config_file = config_file
+ self._response_classes = [
+ 'response_class',
+ 'not_found_class',
+ 'bad_request_class',
+ 'method_not_allowed_class',
+ 'unauthorized_class',
+ 'forbidden_class'
+ ]
+
+ def _convert_url_to_swagger(self, replacement: str, to_replace: str) -> str:
+ return re.sub('<\w+:\w+>', f'{{{replacement}}}', to_replace)
+
+ def write_to_yaml(self, dir=''):
+ """ Using the class' variables, write it to a swagger (yaml) file
+
+ It will create `swagger.yaml` file in current directory, if
+ there is already one, it will print the yaml file instead.
+
+ Args:
+ dir (str, optional):
+ custom directory of the swagger file. If there are no provided, create one in current dir.
+ Returns:
+ dir (str, required):
+ directory of the created swagger file
+ swagger_file (str, required):
+ the contents of the swagger yaml file
+ """
+ if not os.path.exists(self.config_file):
+ raise Exception('Missing JSON Config file.')
+ else:
+ with open(self.config_file, 'r') as f:
+ data = json.loads(f.read())
+ proj_title = data.get('name')
+ proj_desc = data.get('description', 'A Pfunk project')
+ proj_ver = data.get('ver', '1.0')
+ host = data.get('host', 'pfunk.com')
+ basePath = data.get('basePath', '/')
+ schemes = ['https']
+
+ if dir:
+ if not dir.endswith('/'):
+ dir = dir + "/"
+
+ info = sw.Info(
+ title=proj_title,
+ description=proj_desc,
+ version=proj_ver)
+ t = sw.SwaggerTemplate(
+ host=host,
+ basePath=basePath,
+ info=info,
+ paths=self.paths,
+ schemes=schemes,
+ definitions=self.definitions)
+
+ if not os.path.exists(f'{dir}swagger.yaml'):
+ with open(f'{dir}swagger.yaml', 'x') as swag_doc:
+ swag_doc.write(t.to_yaml())
+ else:
+ print(
+ 'There is an existing swagger file. Kindly move/delete it to generate a new one.')
+ # print(t.to_yaml())
+ return {
+ "dir": f'{dir}swagger.yaml',
+ "swagger_file": t.to_yaml()
+ }
+
+ def get_operations(self, col: Collection):
+    """ Acquires all of the endpoints in the collection and makes each one
+ as an `operation` for swagger doc
+
+ Appends all of the acquired paths here in `self.paths`
+ array class variable
+
+ Args:
+ col (`pfunk.collection.Collection`, required):
+ The collection that has views
+
+ Returns:
+ paths ([`swaggyp.Path`], required):
+ An array of `Path` that can be consumed using
+ `swaggyp.SwaggerTemplate` to show
+ available paths
+ ```
+ """
+ for view in col.collection_views:
+ # We skip HTML Views
+ if issubclass(view, HTMLView):
+ continue
+ route = view.url(col)
+ rule = route.rule
+ methods = route.methods
+ args = route.arguments
+ arg_type = None
+ responses = []
+ for rsp_cls in self._response_classes:
+ if rsp_cls == 'response_class':
+ responses.append(
+ sw.Response(
+ status_code=view.response_class.status_code,
+ description=view.get_query.__doc__ or 'Fill the docstrings to show description')
+ )
+ else:
+ responses.append(
+ sw.Response(
+ status_code=getattr(view, rsp_cls).status_code,
+ description=getattr(view, rsp_cls).default_payload)
+ )
+
+ view_methods = list(methods)
+ for method in view_methods:
+ if method == 'HEAD':
+ # Skip HEAD operations
+ continue
+
+ # Acquire path parameters of URL
+ if args is None or len(args) == 0:
+ # if `defaults` weren't used in URL building, use the argument defined in the URL string
+ for converter, arguments, variable in parse_rule(rule):
+ if variable.startswith('/') or converter is None:
+ continue
+ args = variable
+ arg_type = converter
+
+ params = []
+ # Construct path parameters for swagger generation
+ if arg_type:
+ path_params = sw.Parameter(
+ name=args,
+ _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type),
+ _in='path',
+ description='',
+ required=True,
+ allowEmptyValue=False
+ )
+ params.append(path_params)
+
+ # Acquire payload of the view from the View's `_payload_docs`
+ view_payload = view(col)._payload_docs()
+
+ # Construct payload for swagger generation
+ if view_payload:
+ for field in view_payload.get('data'):
+ if field.get('schema'):
+ schema = sw.SwagSchema(ref=field.get('schema'))
+ param = sw.Parameter(
+ name=field.get('name'),
+ _in=field.get('in'),
+ description=field.get('description'),
+ required=field.get('required'),
+ schema=schema
+ )
+ else:
+ param = sw.Parameter(
+ name=field.get('name'),
+ _type=field.get('type'),
+ _in=field.get('in'),
+ description=field.get('description'),
+ required=field.get('required'),
+ allowEmptyValue=False,
+ _format=field.get('format')
+ )
+ params.append(param)
+
+ consumes = ['application/json',
+ 'application/x-www-form-urlencoded']
+ produces = ['application/json',
+ 'application/x-www-form-urlencoded']
+ view_docs = view.__doc__
+ if params:
+ op = sw.Operation(
+ http_method=method.lower(),
+ summary=f'({method}) -> {col.__class__.__name__}',
+ description=view_docs,
+ responses=responses,
+ consumes=consumes,
+ produces=produces,
+ parameters=params)
+ else:
+ op = sw.Operation(
+ http_method=method.lower(),
+ summary=f'({method}) -> {col.__class__.__name__}',
+ description=view_docs,
+ responses=responses,
+ consumes=consumes,
+ produces=produces)
+
+ # Replace werkzeug params () to swagger-style params ({id})
+ swagger_rule = self._convert_url_to_swagger(args, rule)
+ p = sw.Path(endpoint=swagger_rule, operations=[op])
+ self.paths.append(p)
+ return self.paths
+
+ def get_model_definitions(self, col: Collection):
+ """ Acquires collection's name, fields, and relationships to
+ convert it to a swagger `Definition`
+
+ Converts `ReferenceField` and `ManyToManyField` to
+ reference other definitions as a characterization
+ of relationships defined on models
+
+ Args:
+ col (`pfunk.collection.Collection`, required):
+ The collection that has views
+
+ Returns:
+ definitions ([`swaggyp.Definition`], required):
+ An array of `Definition` that can be consumed using
+ `swaggyp.SwaggerTemplate` to show
+ available models
+
+ Payload:
+
+
+ """
+ # Define model definitions by iterating through collection's fields for its properties
+ col_properties = {}
+ for property, field_type in col._base_properties.items():
+ # Get pfunk field specifier
+ field_type_class = field_type.__class__.__name__
+
+ if field_type_class in ['ReferenceField', 'ManyToManyField']:
+ # Acquire the class that the collection is referencing to
+ foreign_class = field_type.get_foreign_class().__name__
+ ref_field = PFUNK_TO_YAML_TYPES.get(field_type_class)
+ col_properties[property] = {
+ "$ref": ref_field + foreign_class}
+ else:
+ col_properties[property] = {
+ "type": PFUNK_TO_YAML_TYPES.get(field_type_class)}
+ model_schema = sw.SwagSchema(properties=col_properties)
+ model = sw.Definition(name=type(col).__name__, schema=model_schema)
+ self.definitions.append(model)
+ return self.definitions
+
+ def generate_swagger(self, dir=''):
+ """ One-function-to-call needed function to generate a swagger documentation
+
+ Args:
+ dir (str, optional):
+ directory to create the yaml file
+ """
+ for i in self.collections:
+ col = i()
+ self.get_operations(col)
+ self.get_model_definitions(col)
+ return self.write_to_yaml(dir)
diff --git a/pfunk/utils/templates.py b/pfunk/utils/templates.py
new file mode 100644
index 0000000..f27ae1b
--- /dev/null
+++ b/pfunk/utils/templates.py
@@ -0,0 +1,21 @@
+from envs import env
+from jinja2 import Environment
+from jinja2.loaders import ChoiceLoader, PackageLoader, FileSystemLoader
+
+
+def get_loaders():
+ """
+ Get the Jinja2 loaders for the project.
+ Returns: list
+ """
+ loaders = [
+ FileSystemLoader(env('TEMPLATE_ROOT_DIR')),
+ PackageLoader('pfunk.contrib.auth'),
+ PackageLoader('pfunk.forms'),
+ ]
+ for i in env('TEMPLATE_PACKAGES', [], var_type='list'):
+ loaders.append(PackageLoader(i))
+ return loaders
+
+
+temp_env = Environment(loader=ChoiceLoader(get_loaders()))
diff --git a/pfunk/web/request.py b/pfunk/web/request.py
index c19a0b6..5540c7d 100644
--- a/pfunk/web/request.py
+++ b/pfunk/web/request.py
@@ -20,7 +20,7 @@ def __init__(self, event, kwargs):
self.user = None
self.token: str = None
self.jwt: str = None
-
+
def get_cookies(self, raw_cookies):
"""
Returns dict of cookies
@@ -51,6 +51,7 @@ def __init__(self, event, kwargs=None):
super(BaseAPIGatewayRequest, self).__init__(event, kwargs)
self.is_base64_encoded = event.get('isBase64Encoded')
self.body = event.get('body')
+ self.form_data = event.get('body')
self.headers = event.get('headers') or dict()
self.query_params = event.get('queryStringParameters') or dict()
@@ -59,15 +60,23 @@ class WSGIRequest(Request):
"""
WSGI Request
"""
+
def __init__(self, event, kwargs=None):
super(WSGIRequest, self).__init__(event, kwargs=kwargs)
self.method = event.method
self.query_params = event.args
+ self.form_data = self.build_form_data()
self.body = event.data
self.headers = event.headers
self.path = event.path
self.cookies = event.cookies
self.source_ip = event.remote_addr
+ self.reverse = event.reverse
+
+ def build_form_data(self):
+ """ Builds the form data """
+ if self.raw_event.form:
+ return {k: v for k, v in self.raw_event.form.items()}
class RESTRequest(BaseAPIGatewayRequest):
@@ -98,6 +107,7 @@ class HTTPRequest(BaseAPIGatewayRequest):
"""
HTTP Request: For HTTP API Gateway
"""
+
def __init__(self, event, kwargs=None):
super(HTTPRequest, self).__init__(event, kwargs=kwargs)
self.raw_event = event
@@ -114,5 +124,3 @@ def __init__(self, event, kwargs=None):
def get_cookies(self, raw_cookies):
return parse_cookie(';'.join(raw_cookies))
-
-
diff --git a/pfunk/web/response.py b/pfunk/web/response.py
index 0feef25..441e196 100644
--- a/pfunk/web/response.py
+++ b/pfunk/web/response.py
@@ -33,7 +33,7 @@ def response(self):
'statusCode': self.status_code,
'body': self.body,
'headers': self.headers
- }
+ }
class NotFoundResponseMixin(object):
@@ -42,6 +42,12 @@ class NotFoundResponseMixin(object):
success: bool = False
+class RedirectResponseMixin(object):
+ status_code = 302
+ default_payload = 'Redirect'
+ success: bool = False
+
+
class BadRequestResponseMixin(object):
status_code = 400
default_payload = 'Bad Request'
@@ -122,4 +128,10 @@ class HttpBadRequestResponse(BadRequestResponseMixin, Response):
class JSONBadRequestResponse(BadRequestResponseMixin, JSONResponse):
- pass
\ No newline at end of file
+ pass
+
+
+class HttpRedirectResponse(RedirectResponseMixin, Response):
+ def __init__(self, location, payload=None, headers={}, *args, **kwargs):
+ super(HttpRedirectResponse, self).__init__(payload, headers, *args, **kwargs)
+ self.raw_headers['Location'] = location
diff --git a/pfunk/web/views/base.py b/pfunk/web/views/base.py
index 375e286..66248d9 100644
--- a/pfunk/web/views/base.py
+++ b/pfunk/web/views/base.py
@@ -1,12 +1,13 @@
from envs import env
-from faunadb.errors import NotFound as FaunaNotFound, PermissionDenied, BadRequest, ErrorData
+from faunadb.errors import NotFound as FaunaNotFound, PermissionDenied, BadRequest
from jwt import InvalidSignatureError
from valley.exceptions import ValidationException
+from valley.utils import import_util
from werkzeug.exceptions import NotFound, MethodNotAllowed
from werkzeug.http import dump_cookie
from werkzeug.routing import Rule
-from pfunk.exceptions import TokenValidationFailed, LoginFailed, Unauthorized, DocNotFound, GraphQLError
+from pfunk.exceptions import TokenValidationFailed, LoginFailed, Unauthorized, DocNotFound, GraphQLError, NotUniqueError
from pfunk.web.request import Request, RESTRequest, HTTPRequest
from pfunk.web.response import (Response, HttpNotFoundResponse, HttpForbiddenResponse, HttpBadRequestResponse,
HttpMethodNotAllowedResponse, HttpUnauthorizedResponse)
@@ -87,7 +88,7 @@ def process_lambda_request(self):
response = self.not_found_class().response
except PermissionDenied:
response = self.forbidden_class().response
- except (BadRequest, GraphQLError) as e:
+ except (BadRequest, NotUniqueError, GraphQLError) as e:
if isinstance(e, BadRequest):
payload = e._get_description()
else:
@@ -123,7 +124,7 @@ def process_wsgi_request(self):
response = self.not_found_class()
except PermissionDenied:
response = self.forbidden_class()
- except (BadRequest, GraphQLError) as e:
+ except (BadRequest, NotUniqueError, GraphQLError) as e:
if isinstance(e, BadRequest):
payload = e._get_description()
else:
@@ -153,10 +154,10 @@ def get_token(self):
returns the decrypted token
Returns:
- token (`contrib.auth.collections.Key`, required): token of Fauna
+ token (`contrib.auth.key.Key`, required): token of Fauna
"""
- from pfunk.contrib.auth.collections import Key
+ from pfunk.contrib.auth.key import Key
enc_token = self.request.cookies.get(env('TOKEN_COOKIE_NAME', 'tk'))
if not enc_token:
@@ -321,7 +322,7 @@ def get_query(self):
return self.collection.all(**self.get_query_kwargs())
def get_query_kwargs(self):
- """ Acquires the addutional generic kwargs in a query
+ """ Acquires the additional generic kwargs in a query
This includes the keys that are generic
to queries. ['after, 'before', 'page_size']
@@ -341,7 +342,7 @@ class ObjectMixin(object):
""" Generic GET mixin for a Fauna object. """
def get_query(self):
- """ Acuires """
+        """ Acquires the entity in a collection by ID """
return self.collection.get(self.request.kwargs.get('id'), **self.get_query_kwargs())
def get_query_kwargs(self):
@@ -350,18 +351,24 @@ def get_query_kwargs(self):
class UpdateMixin(object):
""" Generic PUT mixin for a fauna object """
+ form_class = None
+
+ def get_data(self):
+ """ Acquires the data from the request body """
+ return self.request.get_json()
def get_query_kwargs(self):
- data = self.request.get_json()
+ data = self.get_data()
fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ReferenceField')
for k, v in fields.items():
current_value = data.get(k)
col = v.get('foreign_class')
+ if isinstance(col, str):
+ col = import_util(col)
if current_value:
obj = col.get(current_value)
data[k] = obj
-
return data
@@ -373,6 +380,7 @@ class ActionMixin(object):
action of the endpoint
"""
action: str
+ template_name = '{collection}/{action}.html'
@classmethod
def url(cls, collection):
@@ -380,6 +388,21 @@ def url(cls, collection):
methods=cls.http_methods)
+class JSONActionMixin(ActionMixin):
+ """ Mixin for specifying what action should an endpoint have
+
+ Attributes:
+ action (str, required):
+ action of the endpoint
+ """
+ action: str
+
+ @classmethod
+ def url(cls, collection):
+ return Rule(f'/json/{collection.get_class_name()}/{cls.action}/', endpoint=cls.as_view(collection),
+ methods=cls.http_methods)
+
+
class IDMixin(ActionMixin):
""" Mixin for specifying a URL that accepts an ID """
@@ -387,3 +410,12 @@ class IDMixin(ActionMixin):
def url(cls, collection):
        return Rule(f'/{collection.get_class_name()}/{cls.action}/<int:id>/', endpoint=cls.as_view(collection),
                    methods=cls.http_methods)
+
+
+class JSONIDMixin(ActionMixin):
+ """ Mixin for specifying a URL that accepts an ID """
+
+ @classmethod
+ def url(cls, collection):
+        return Rule(f'/json/{collection.get_class_name()}/{cls.action}/<int:id>/', endpoint=cls.as_view(collection),
+                    methods=cls.http_methods)
diff --git a/pfunk/web/views/graphql.py b/pfunk/web/views/graphql.py
index cbd6065..f0842d7 100644
--- a/pfunk/web/views/graphql.py
+++ b/pfunk/web/views/graphql.py
@@ -1,13 +1,13 @@
import requests
from envs import env
+from graphql.exceptions import SyntaxError as GQLSyntaxError
+from graphql.parser import GraphQLParser
from werkzeug.routing import Rule
from pfunk.exceptions import GraphQLError
from pfunk.utils.publishing import BearerAuth
from pfunk.web.response import GraphQLResponse
from pfunk.web.views.json import JSONView
-from graphql.parser import GraphQLParser
-from graphql.exceptions import SyntaxError as GQLSyntaxError
parser = GraphQLParser()
@@ -55,12 +55,12 @@ class GraphQLView(JSONView):
def get_query(self):
gql = self.process_graphql()
resp = requests.request(
- method='post',
- url=env('FAUNA_GRAPHQL_URL', 'https://graphql.fauna.com/graphql'),
- json=self.request.get_json(),
- auth=BearerAuth(self.request.token),
- allow_redirects=False
- )
+ method='post',
+ url=env('FAUNA_GRAPHQL_URL', 'https://graphql.fauna.com/graphql'),
+ json=self.request.get_json(),
+ auth=BearerAuth(self.request.token),
+ allow_redirects=False
+ )
return resp.json()
def process_graphql(self):
@@ -76,4 +76,4 @@ def process_graphql(self):
@classmethod
def url(cls, collection=None):
return Rule(f'/graphql/', endpoint=cls.as_view(),
- methods=cls.http_methods)
\ No newline at end of file
+ methods=cls.http_methods)
diff --git a/pfunk/web/views/html.py b/pfunk/web/views/html.py
new file mode 100644
index 0000000..dc66458
--- /dev/null
+++ b/pfunk/web/views/html.py
@@ -0,0 +1,232 @@
+from pfunk.client import q
+from pfunk.forms.collections import CollectionForm
+from pfunk.utils.templates import temp_env
+from pfunk.web.response import Response, HttpNotFoundResponse, HttpBadRequestResponse, HttpMethodNotAllowedResponse, \
+ HttpUnauthorizedResponse, HttpForbiddenResponse, HttpRedirectResponse
+from pfunk.web.views.base import UpdateMixin, ActionMixin, IDMixin, ObjectMixin, QuerysetMixin, HTTPView
+
+
+class HTMLView(HTTPView):
+ """
+ Base class for all HTML views
+ """
+ response_class = Response
+ content_type_accepted = 'text/html'
+ restrict_content_type = False
+ not_found_class = HttpNotFoundResponse
+ bad_request_class = HttpBadRequestResponse
+ method_not_allowed_class = HttpMethodNotAllowedResponse
+ unauthorized_class = HttpUnauthorizedResponse
+ forbidden_class = HttpForbiddenResponse
+ template_name = None
+
+ def get_template(self):
+ return temp_env.get_template(
+ self.template_name.format(
+ collection=self.collection.get_collection_name().lower(),
+ action=self.action
+ )
+ )
+
+ def get_response(self):
+ return self.response_class(
+ payload=self.get_template().render(**self.get_context()),
+ headers=self.get_headers()
+ )
+
+
+class FormMixin(UpdateMixin):
+ success_url = '/{collection}/{action}/'
+
+ def get_form(self, form_class=None):
+ """ Acquires the form for the request """
+ if form_class is None:
+ form_class = self.get_form_class()
+ return form_class(**self.get_form_kwargs())
+
+ def get_data(self):
+ return self.request.form_data
+
+ def get_object(self):
+ """ Acquires the object for the request """
+ return self.collection.get(self.request.kwargs.get('id'))
+
+ def get_context(self):
+ context = super(UpdateMixin, self).get_context()
+ context['form'] = self.get_form()
+ return context
+
+ def get_form_class(self):
+ """ Acquires or builds the form class to use for updating the object """
+ if self.form_class:
+ return self.form_class
+ return self.build_form_class()
+
+ def build_form_class(self):
+ """ Builds the form class to use for updating the object """
+
+ class Meta:
+ collection = self.collection
+
+ form_class = type(f"{self.collection.get_collection_name()}Form", (CollectionForm,), {
+ # constructor
+
+ "Meta": Meta,
+ })
+ return form_class
+
+ def get_form_kwargs(self):
+ """ Acquires the kwargs for the form """
+ data = self.request.form_data
+ if self.action == 'update':
+ if not data:
+ data = dict()
+ data['_instance'] = self.get_object()
+ return data
+
+ def form_valid(self, form):
+ """ Called when the form is valid """
+ q = self.get_query()
+ return HttpRedirectResponse(
+ location=self.get_success_url(),
+ )
+
+ def get_success_url(self):
+ """ Acquires the success url for the form """
+ return self.success_url.format(
+ collection=self.collection.get_collection_name().lower(),
+ action='list')
+
+ def form_invalid(self, form):
+ """ Called when the form is invalid """
+ print(self.action, "Form Invalid: Got Here")
+ return self.error_response(form._errors)
+
+ def get_response(self, form=None):
+ if self.request.method == 'POST':
+ form = self.get_form()
+ form.validate()
+ if form._is_valid:
+ return self.form_valid(form)
+ return self.response_class(
+ payload=self.get_template().render(**self.get_context()),
+ headers=self.get_headers()
+ )
+
+ def get_m2m_kwargs(self, obj):
+ """ Acquires the keyword-arguments for the many-to-many relationship
+
+ FaunaDB is only able to create a many-to-many relationship
+ by creating a collection that references both of the object.
+ So, when creating an entity, it is needed to create an entity to
+ make them related to each other.
+
+ Args:
+ obj (dict, required):
+
+ """
+ data = self.get_data()
+ fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField')
+ for k, v in fields.items():
+ current_value = data.get(k)
+ col = v.get('foreign_class')()
+ client = col().client()
+ client.query(
+ q.create(
+
+ )
+ )
+
+
+class HTMLCreateView(FormMixin, ActionMixin, HTMLView):
+ """
+ Define a `Create` view that allows `creation` of an entity in the collection
+ """
+ action = 'create'
+ http_methods = ['post']
+ login_required = True
+
+ def get_query(self):
+ """ Entity created in a collection """
+ obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token)
+ return obj
+
+
+class HTMLUpdateView(FormMixin, IDMixin, HTMLView):
+ """
+ Define a view to allow `Update` operations
+ """
+ action = 'update'
+ http_methods = ['post']
+ login_required = True
+
+ def get_query(self):
+ obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token)
+ kwargs = self.get_query_kwargs()
+ try:
+ kwargs.pop('_instance')
+ except KeyError:
+ pass
+ obj._data.update(kwargs)
+ obj.save()
+ return obj
+
+
+class HTMLDetailView(ObjectMixin, IDMixin, HTMLView):
+ """ Define a view to allow single entity operations """
+ action = 'detail'
+ restrict_content_type = False
+ login_required = True
+
+ def get_context(self):
+ """ Context for the view """
+ context = super(HTMLDetailView, self).get_context()
+ context['object'] = self.get_query()
+ return context
+
+
+class HTMLDeleteView(ObjectMixin, IDMixin, HTMLView):
+ """ Define a view to allow `Delete` entity operations """
+ action = 'delete'
+ http_methods = ['get', 'post']
+ login_required = True
+ success_url = '/{collection}/{action}/'
+
+ def get_query(self):
+        """ Deletes an entity in the specified collection """
+ return self.collection.delete_from_id(self.request.kwargs.get('id'), _token=self.request.token)
+
+ def get_object(self):
+ """ Acquires the object for the request """
+ return self.collection.get(self.request.kwargs.get('id'), _token=self.request.token)
+
+ def get_context(self):
+ """ Context for the view """
+ context = super(HTMLDeleteView, self).get_context()
+ context['object'] = self.get_object()
+ return context
+
+ def get_success_url(self):
+ """ Acquires the success url for the form """
+ return self.success_url.format(
+ collection=self.collection.get_collection_name().lower(),
+ action='list')
+
+ def post(self, **kwargs):
+ self.get_query()
+ return HttpRedirectResponse(
+ location=self.get_success_url(),
+ )
+
+
+class HTMLListView(QuerysetMixin, ActionMixin, HTMLView):
+ """ Define a view to allow `All/List` entity operations """
+ restrict_content_type = False
+ action = 'list'
+ login_required = True
+
+ def get_context(self):
+ """ Context for the view """
+ context = super(HTMLListView, self).get_context()
+ context['object_list'] = self.get_query()
+ return context
diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py
index 1459001..84d3937 100644
--- a/pfunk/web/views/json.py
+++ b/pfunk/web/views/json.py
@@ -1,7 +1,10 @@
+from valley.utils import import_util
+
+from pfunk.client import q
from pfunk.web.response import JSONResponse, JSONNotFoundResponse, JSONBadRequestResponse, \
JSONMethodNotAllowedResponse, JSONUnauthorizedResponse, JSONForbiddenResponse
-from pfunk.client import q
-from pfunk.web.views.base import ActionMixin, HTTPView, IDMixin, ObjectMixin, QuerysetMixin, UpdateMixin
+from pfunk.web.views.base import HTTPView, ObjectMixin, QuerysetMixin, UpdateMixin, \
+ JSONActionMixin, JSONIDMixin
class JSONView(HTTPView):
@@ -25,15 +28,69 @@ def get_response(self):
headers=self.get_headers()
)
+ def _payload_docs(self):
+ """ Used in custom defining payload parameters for the view in Swagger generation.
+
+ Should return a dict that has the fields of a swagger parameter.
+ If there is an error in the swagger, it will not be raised.
+ Usage of `https://editor.swagger.io` to validate is recommended
+ e.g.
+ ```
+ # Defining formdata
+ {"data": [
+ {
+ "name":"name",
+ "in":"formData",
+ "description":"name of the pet",
+ "required": true,
+ "type": "string"
+ },
+ {
+ "name": "status",
+ "in": "formData",
+ "description": "status of the pet",
+ "required":true,
+ "type":"string"
+ }
+ ]}
+
+ # Defining a payload that references a model
+ {"data": [
+ {
+ "name": "body",
+ "in": "body",
+ "description": "Collection object to add",
+ "required": True,
+ "schema": "#/definitions/Person"
+ }
+ ]}
+ ```
+ """
+ return {}
+
+ def get_req_with_m2m(self, data):
+ """ Returns request with updated params that has the proper m2m entities """
+ fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField')
+ for k, v in fields.items():
+ col = import_util(v['foreign_class'])
+ entities = []
+ for ref in data[k]:
+ c = col.get(ref)
+ entities.append(c)
+ data[k] = entities
+ return data
-class CreateView(UpdateMixin, ActionMixin, JSONView):
+
+class CreateView(UpdateMixin, JSONActionMixin, JSONView):
""" Define a `Create` view that allows `creation` of an entity in the collection """
action = 'create'
http_methods = ['post']
login_required = True
def get_query(self):
- obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token)
+ """ Entity created in a collection """
+ obj = self.collection.create(
+ **self.get_query_kwargs(), _token=self.request.token)
return obj
def get_m2m_kwargs(self, obj):
@@ -43,13 +100,12 @@ def get_m2m_kwargs(self, obj):
by creating a collection that references both of the object.
So, when creating an entity, it is needed to create an entity to
make them related to each other.
-
Args:
obj (dict, required):
-
"""
data = self.request.get_json()
- fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField')
+ fields = self.collection.get_foreign_fields_by_type(
+ 'pfunk.fields.ManyToManyField')
for k, v in fields.items():
current_value = data.get(k)
col = v.get('foreign_class')()
@@ -60,43 +116,71 @@ def get_m2m_kwargs(self, obj):
)
)
-
-class UpdateView(UpdateMixin, IDMixin, JSONView):
+ def _payload_docs(self):
+ # Reference the collection by default
+ if self.collection:
+ return {"data": [
+ {
+ "name": "body",
+ "in": "body",
+ "description": "Collection object to add",
+ "required": True,
+ "schema": f"#/definitions/{self.collection.__class__.__name__}"
+ }
+ ]}
+
+class UpdateView(UpdateMixin, JSONIDMixin, JSONView):
""" Define a view to allow `Update` operations """
action = 'update'
http_methods = ['put']
login_required = True
def get_query(self):
- obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token)
+ """ Entity in collection updated by an ID """
+ obj = self.collection.get(self.request.kwargs.get(
+ 'id'), _token=self.request.token)
obj._data.update(self.get_query_kwargs())
+ data = self.get_query_kwargs()
+ data = self.get_req_with_m2m(data)
+ obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token)
+ obj._data.update(data)
obj.save()
return obj
-
-class DetailView(ObjectMixin, IDMixin, JSONView):
+ def _payload_docs(self):
+ # Reference the collection by default
+ if self.collection:
+ return {"data": [
+ {
+ "name": "body",
+ "in": "body",
+ "description": "Collection object to add",
+ "required": True,
+ "schema": f"#/definitions/{self.collection.__class__.__name__}"
+ }
+ ]}
+
+
+class DetailView(ObjectMixin, JSONIDMixin, JSONView):
""" Define a view to allow single entity operations """
action = 'detail'
restrict_content_type = False
login_required = True
-class DeleteView(ObjectMixin, IDMixin, JSONView):
+class DeleteView(ObjectMixin, JSONIDMixin, JSONView):
""" Define a view to allow `Delete` entity operations """
action = 'delete'
http_methods = ['delete']
login_required = True
def get_query(self):
+ """ Deleted an entity in the specified collection """
return self.collection.delete_from_id(self.request.kwargs.get('id'), _token=self.request.token)
-class ListView(QuerysetMixin, ActionMixin, JSONView):
+class ListView(QuerysetMixin, JSONActionMixin, JSONView):
""" Define a view to allow `All/List` entity operations """
restrict_content_type = False
action = 'list'
- login_required = True
-
-
-class GraphQLView(HTTPView):
- pass
\ No newline at end of file
+ login_required = True
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 619a60b..72aad27 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,6 +1,23 @@
+[[package]]
+name = "anyio"
+version = "3.6.2"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+category = "main"
+optional = false
+python-versions = ">=3.6.2"
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+
+[package.extras]
+doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
+test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"]
+trio = ["trio (>=0.16,<0.22)"]
+
[[package]]
name = "appnope"
-version = "0.1.2"
+version = "0.1.3"
description = "Disable App Nap on macOS >= 10.9"
category = "dev"
optional = false
@@ -16,7 +33,6 @@ python-versions = ">=3.6"
[package.dependencies]
argon2-cffi-bindings = "*"
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
[package.extras]
dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"]
@@ -38,6 +54,31 @@ cffi = ">=1.0.1"
dev = ["pytest", "cogapp", "pre-commit", "wheel"]
tests = ["pytest"]
+[[package]]
+name = "arrow"
+version = "1.2.3"
+description = "Better dates & times for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+python-dateutil = ">=2.7.0"
+
+[[package]]
+name = "asttokens"
+version = "2.2.1"
+description = "Annotate AST trees with source code positions"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+six = "*"
+
+[package.extras]
+test = ["astroid (<=2.5.3)", "pytest"]
+
[[package]]
name = "astunparse"
version = "1.6.3"
@@ -51,17 +92,19 @@ six = ">=1.6.1,<2.0"
[[package]]
name = "attrs"
-version = "21.4.0"
+version = "22.2.0"
description = "Classes Without Boilerplate"
-category = "dev"
+category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
+cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
+dev = ["attrs"]
+docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
+tests = ["attrs", "zope.interface"]
+tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"]
+tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"]
[[package]]
name = "backcall"
@@ -71,6 +114,21 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.0"
+description = "Screen-scraping library"
+category = "dev"
+optional = false
+python-versions = ">=3.6.0"
+
+[package.dependencies]
+soupsieve = ">1.2"
+
+[package.extras]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
[[package]]
name = "bleach"
version = "4.1.0"
@@ -86,35 +144,35 @@ webencodings = "*"
[[package]]
name = "boto3"
-version = "1.20.46"
+version = "1.26.104"
description = "The AWS SDK for Python"
category = "main"
optional = false
-python-versions = ">= 3.6"
+python-versions = ">= 3.7"
[package.dependencies]
-botocore = ">=1.23.46,<1.24.0"
-jmespath = ">=0.7.1,<1.0.0"
-s3transfer = ">=0.5.0,<0.6.0"
+botocore = ">=1.29.104,<1.30.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.6.0,<0.7.0"
[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.23.46"
+version = "1.29.104"
description = "Low-level, data-driven core of boto 3."
category = "main"
optional = false
-python-versions = ">= 3.6"
+python-versions = ">= 3.7"
[package.dependencies]
-jmespath = ">=0.7.1,<1.0.0"
+jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
urllib3 = ">=1.25.4,<1.27"
[package.extras]
-crt = ["awscrt (==0.12.5)"]
+crt = ["awscrt (==0.16.9)"]
[[package]]
name = "cachetools"
@@ -126,15 +184,15 @@ python-versions = "~=3.5"
[[package]]
name = "certifi"
-version = "2021.10.8"
+version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
[[package]]
name = "cffi"
-version = "1.15.0"
+version = "1.15.1"
description = "Foreign Function Interface for Python calling C code."
category = "main"
optional = false
@@ -145,34 +203,46 @@ pycparser = "*"
[[package]]
name = "charset-normalizer"
-version = "2.0.10"
+version = "3.1.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
-python-versions = ">=3.5.0"
-
-[package.extras]
-unicode_backport = ["unicodedata2"]
+python-versions = ">=3.7.0"
[[package]]
name = "click"
-version = "8.0.3"
+version = "8.1.3"
description = "Composable command line interface toolkit"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
[[package]]
name = "colorama"
-version = "0.4.4"
+version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+
+[[package]]
+name = "comm"
+version = "0.1.3"
+description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+traitlets = ">=5.3"
+
+[package.extras]
+lint = ["black (>=22.6.0)", "mdformat-gfm (>=0.3.5)", "mdformat (>0.7)", "ruff (>=0.0.156)"]
+test = ["pytest"]
+typing = ["mypy (>=0.990)"]
[[package]]
name = "coverage"
@@ -206,11 +276,11 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret
[[package]]
name = "debugpy"
-version = "1.5.1"
+version = "1.6.6"
description = "An implementation of the Debug Adapter Protocol for Python"
category = "dev"
optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
+python-versions = ">=3.7"
[[package]]
name = "decorator"
@@ -228,14 +298,6 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-[[package]]
-name = "entrypoints"
-version = "0.3"
-description = "Discover and load entry points from installed packages."
-category = "dev"
-optional = false
-python-versions = ">=2.7"
-
[[package]]
name = "envs"
version = "1.4"
@@ -247,9 +309,31 @@ python-versions = ">=3.6,<4.0"
[package.extras]
cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"]
+[[package]]
+name = "executing"
+version = "1.2.0"
+description = "Get the currently executing AST node of a frame, and other information"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.extras]
+tests = ["asttokens", "pytest", "littleutils", "rich"]
+
+[[package]]
+name = "fastjsonschema"
+version = "2.16.3"
+description = "Fastest Python implementation of JSON schema"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.extras]
+devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"]
+
[[package]]
name = "faunadb"
-version = "4.1.1"
+version = "4.5.0"
description = "FaunaDB Python driver"
category = "main"
optional = false
@@ -257,7 +341,7 @@ python-versions = "*"
[package.dependencies]
future = "*"
-hyper = "*"
+httpx = {version = "*", extras = ["http2"]}
iso8601 = "*"
requests = "*"
@@ -265,9 +349,29 @@ requests = "*"
lint = ["pylint"]
test = ["nose2", "nose2"]
+[[package]]
+name = "formy"
+version = "1.3.1"
+description = "Valley is a Python forms library that allows you to use Jinja2 templates to create and manage the HTML of your forms."
+category = "main"
+optional = false
+python-versions = ">=3.8,<4.0"
+
+[package.dependencies]
+Jinja2 = ">=3.1.2,<4.0.0"
+valley = ">=1.5.8,<2.0.0"
+
+[[package]]
+name = "fqdn"
+version = "1.5.1"
+description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4"
+
[[package]]
name = "future"
-version = "0.18.2"
+version = "0.18.3"
description = "Clean single-source support for Python 3 and 2"
category = "main"
optional = false
@@ -284,52 +388,84 @@ python-versions = "*"
[package.dependencies]
ply = ">=3.6"
+[[package]]
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "h2"
-version = "2.6.2"
+version = "4.1.0"
description = "HTTP/2 State-Machine based protocol implementation"
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6.1"
[package.dependencies]
-hpack = ">=2.2,<4"
-hyperframe = ">=3.1,<4.0.0 || >4.0.0,<6"
+hpack = ">=4.0,<5"
+hyperframe = ">=6.0,<7"
[[package]]
name = "hpack"
-version = "3.0.0"
+version = "4.0.0"
description = "Pure-Python HPACK header compression"
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6.1"
[[package]]
-name = "hyper"
-version = "0.7.0"
-description = "HTTP/2 Client for Python"
+name = "httpcore"
+version = "0.16.3"
+description = "A minimal low-level HTTP client."
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
[package.dependencies]
-h2 = ">=2.4,<3.0"
-hyperframe = ">=3.2,<4.0"
+anyio = ">=3.0,<5.0"
+certifi = "*"
+h11 = ">=0.13,<0.15"
+sniffio = ">=1.0.0,<2.0.0"
[package.extras]
-fast = ["pycohttpparser"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.23.3"
+description = "The next generation HTTP client."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+certifi = "*"
+h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""}
+httpcore = ">=0.15.0,<0.17.0"
+rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "hyperframe"
-version = "3.2.0"
+version = "6.0.1"
description = "HTTP/2 framing layer for Python"
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6.1"
[[package]]
name = "idna"
-version = "3.3"
+version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
@@ -337,64 +473,72 @@ python-versions = ">=3.5"
[[package]]
name = "importlib-metadata"
-version = "4.10.1"
+version = "6.1.0"
description = "Read metadata from Python packages"
-category = "main"
+category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
-typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"]
perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"]
[[package]]
name = "importlib-resources"
-version = "5.4.0"
+version = "5.12.0"
description = "Read resources from Python packages"
-category = "dev"
+category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"]
+docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"]
[[package]]
name = "ipykernel"
-version = "6.7.0"
+version = "6.22.0"
description = "IPython Kernel for Jupyter"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
[package.dependencies]
appnope = {version = "*", markers = "platform_system == \"Darwin\""}
-debugpy = ">=1.0.0,<2.0"
+comm = ">=0.1.1"
+debugpy = ">=1.6.5"
ipython = ">=7.23.1"
-jupyter-client = "<8.0"
-matplotlib-inline = ">=0.1.0,<0.2.0"
+jupyter-client = ">=6.1.12"
+jupyter-core = ">=4.12,<5.0.0 || >=5.1.0"
+matplotlib-inline = ">=0.1"
nest-asyncio = "*"
-tornado = ">=4.2,<7.0"
-traitlets = ">=5.1.0,<6.0"
+packaging = "*"
+psutil = "*"
+pyzmq = ">=20"
+tornado = ">=6.1"
+traitlets = ">=5.4.0"
[package.extras]
-test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "ipyparallel"]
+cov = ["coverage", "curio", "matplotlib", "pytest-cov", "trio"]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"]
+pyqt5 = ["pyqt5"]
+pyside6 = ["pyside6"]
+test = ["flaky", "ipyparallel", "pre-commit", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest (>=7.0)"]
[[package]]
name = "ipython"
-version = "7.31.1"
+version = "8.12.0"
description = "IPython: Productive Interactive Computing"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
[package.dependencies]
appnope = {version = "*", markers = "sys_platform == \"darwin\""}
@@ -405,20 +549,24 @@ jedi = ">=0.16"
matplotlib-inline = "*"
pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""}
pickleshare = "*"
-prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0"
-pygments = "*"
-traitlets = ">=4.2"
+prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0"
+pygments = ">=2.4.0"
+stack-data = "*"
+traitlets = ">=5"
+typing-extensions = {version = "*", markers = "python_version < \"3.10\""}
[package.extras]
-all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"]
-doc = ["Sphinx (>=1.3)"]
+all = ["black", "ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.21)", "pandas", "trio"]
+black = ["black"]
+doc = ["ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath"]
kernel = ["ipykernel"]
nbconvert = ["nbconvert"]
nbformat = ["nbformat"]
-notebook = ["notebook", "ipywidgets"]
+notebook = ["ipywidgets", "notebook"]
parallel = ["ipyparallel"]
qtconsole = ["qtconsole"]
-test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"]
+test = ["pytest (<7.1)", "pytest-asyncio", "testpath"]
+test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "trio"]
[[package]]
name = "ipython-genutils"
@@ -430,35 +578,44 @@ python-versions = "*"
[[package]]
name = "ipywidgets"
-version = "7.6.5"
-description = "IPython HTML widgets for Jupyter"
+version = "8.0.6"
+description = "Jupyter interactive widgets"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
[package.dependencies]
ipykernel = ">=4.5.1"
-ipython = {version = ">=4.0.0", markers = "python_version >= \"3.3\""}
-ipython-genutils = ">=0.2.0,<0.3.0"
-jupyterlab-widgets = {version = ">=1.0.0", markers = "python_version >= \"3.6\""}
-nbformat = ">=4.2.0"
+ipython = ">=6.1.0"
+jupyterlab-widgets = ">=3.0.7,<3.1.0"
traitlets = ">=4.3.1"
-widgetsnbextension = ">=3.5.0,<3.6.0"
+widgetsnbextension = ">=4.0.7,<4.1.0"
[package.extras]
-test = ["pytest (>=3.6.0)", "pytest-cov", "mock"]
+test = ["jsonschema", "ipykernel", "pytest (>=3.6.0)", "pytest-cov", "pytz"]
[[package]]
name = "iso8601"
-version = "1.0.2"
+version = "1.1.0"
description = "Simple module to parse ISO 8601 dates"
category = "main"
optional = false
python-versions = ">=3.6.2,<4.0"
+[[package]]
+name = "isoduration"
+version = "20.11.0"
+description = "Operations with ISO 8601 durations"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+arrow = ">=0.15.0"
+
[[package]]
name = "jedi"
-version = "0.18.1"
+version = "0.18.2"
description = "An autocompletion tool for Python that can be used for text editors."
category = "dev"
optional = false
@@ -468,16 +625,17 @@ python-versions = ">=3.6"
parso = ">=0.8.0,<0.9.0"
[package.extras]
+docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
-testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"]
+testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
[[package]]
name = "jinja2"
-version = "3.0.1"
+version = "3.1.2"
description = "A very fast and expressive template engine."
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
MarkupSafe = ">=2.0"
@@ -487,30 +645,45 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "jmespath"
-version = "0.10.0"
+version = "1.0.1"
description = "JSON Matching Expressions"
category = "main"
optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = ">=3.7"
+
+[[package]]
+name = "jsonpointer"
+version = "2.3"
+description = "Identify specific nodes in a JSON document (RFC 6901)"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "jsonschema"
-version = "4.4.0"
+version = "4.17.3"
description = "An implementation of JSON Schema validation for Python"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
attrs = ">=17.4.0"
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""}
+pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
+rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""}
+uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""}
[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
-format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
[[package]]
name = "jupyter"
@@ -530,85 +703,158 @@ qtconsole = "*"
[[package]]
name = "jupyter-client"
-version = "7.1.2"
+version = "8.1.0"
description = "Jupyter protocol implementation and client libraries"
category = "dev"
optional = false
-python-versions = ">=3.6.1"
+python-versions = ">=3.8"
[package.dependencies]
-entrypoints = "*"
-jupyter-core = ">=4.6.0"
-nest-asyncio = ">=1.5"
-python-dateutil = ">=2.1"
-pyzmq = ">=13"
-tornado = ">=4.1"
-traitlets = "*"
+importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""}
+jupyter-core = ">=4.12,<5.0.0 || >=5.1.0"
+python-dateutil = ">=2.8.2"
+pyzmq = ">=23.0"
+tornado = ">=6.2"
+traitlets = ">=5.3"
[package.extras]
-doc = ["myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"]
-test = ["codecov", "coverage", "ipykernel", "ipython", "mock", "mypy", "pre-commit", "pytest", "pytest-asyncio", "pytest-cov", "pytest-timeout", "jedi (<0.18)"]
+docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinx (>=4)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
+test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
[[package]]
name = "jupyter-console"
-version = "6.4.0"
+version = "6.6.3"
description = "Jupyter terminal console"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
-ipykernel = "*"
+ipykernel = ">=6.14"
ipython = "*"
-jupyter-client = "*"
-prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0"
+jupyter-client = ">=7.0.0"
+jupyter-core = ">=4.12,<5.0.0 || >=5.1.0"
+prompt-toolkit = ">=3.0.30"
pygments = "*"
+pyzmq = ">=17"
+traitlets = ">=5.4"
[package.extras]
-test = ["pexpect"]
+test = ["flaky", "pexpect", "pytest"]
[[package]]
name = "jupyter-core"
-version = "4.9.1"
+version = "5.3.0"
description = "Jupyter core package. A base package on which Jupyter projects rely."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
[package.dependencies]
-pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""}
-traitlets = "*"
+platformdirs = ">=2.5"
+pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""}
+traitlets = ">=5.3"
+
+[package.extras]
+docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"]
+test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
-name = "jupyterlab-pygments"
-version = "0.1.2"
-description = "Pygments theme using JupyterLab CSS variables"
+name = "jupyter-events"
+version = "0.6.3"
+description = "Jupyter Event System library"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
+
+[package.dependencies]
+jsonschema = {version = ">=3.2.0", extras = ["format-nongpl"]}
+python-json-logger = ">=2.0.4"
+pyyaml = ">=5.3"
+rfc3339-validator = "*"
+rfc3986-validator = ">=0.1.1"
+traitlets = ">=5.3"
+
+[package.extras]
+cli = ["click", "rich"]
+docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"]
+test = ["click", "coverage", "pre-commit", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "pytest (>=7.0)", "rich"]
+
+[[package]]
+name = "jupyter-server"
+version = "2.5.0"
+description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications."
+category = "dev"
+optional = false
+python-versions = ">=3.8"
[package.dependencies]
-pygments = ">=2.4.1,<3"
+anyio = ">=3.1.0"
+argon2-cffi = "*"
+jinja2 = "*"
+jupyter-client = ">=7.4.4"
+jupyter-core = ">=4.12,<5.0.0 || >=5.1.0"
+jupyter-events = ">=0.4.0"
+jupyter-server-terminals = "*"
+nbconvert = ">=6.4.4"
+nbformat = ">=5.3.0"
+packaging = "*"
+prometheus-client = "*"
+pywinpty = {version = "*", markers = "os_name == \"nt\""}
+pyzmq = ">=24"
+send2trash = "*"
+terminado = ">=0.8.3"
+tornado = ">=6.2.0"
+traitlets = ">=5.6.0"
+websocket-client = "*"
+
+[package.extras]
+docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"]
+test = ["ipykernel", "pre-commit", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "pytest (>=7.0)", "requests"]
+
+[[package]]
+name = "jupyter-server-terminals"
+version = "0.4.4"
+description = "A Jupyter Server Extension Providing Terminals."
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+
+[package.dependencies]
+pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""}
+terminado = ">=0.8.3"
+
+[package.extras]
+docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"]
+test = ["coverage", "jupyter-server (>=2.0.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout", "pytest (>=7.0)"]
+
+[[package]]
+name = "jupyterlab-pygments"
+version = "0.2.2"
+description = "Pygments theme using JupyterLab CSS variables"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
[[package]]
name = "jupyterlab-widgets"
-version = "1.0.2"
-description = "A JupyterLab extension."
+version = "3.0.7"
+description = "Jupyter interactive widgets for JupyterLab"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[[package]]
name = "markupsafe"
-version = "2.0.1"
+version = "2.1.2"
description = "Safely add untrusted strings to HTML/XML markup."
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[[package]]
name = "matplotlib-inline"
-version = "0.1.3"
+version = "0.1.6"
description = "Inline Matplotlib backend for Jupyter"
category = "dev"
optional = false
@@ -619,81 +865,119 @@ traitlets = "*"
[[package]]
name = "mistune"
-version = "0.8.4"
-description = "The fastest markdown parser in pure Python"
+version = "2.0.5"
+description = "A sane Markdown parser with useful plugins and renderers"
category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "nbclassic"
+version = "0.5.4"
+description = "Jupyter Notebook as a Jupyter Server extension."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+argon2-cffi = "*"
+ipykernel = "*"
+ipython-genutils = "*"
+jinja2 = "*"
+jupyter-client = ">=6.1.1"
+jupyter-core = ">=4.6.1"
+jupyter-server = ">=1.8"
+nbconvert = ">=5"
+nbformat = "*"
+nest-asyncio = ">=1.5"
+notebook-shim = ">=0.1.0"
+prometheus-client = "*"
+pyzmq = ">=17"
+Send2Trash = ">=1.8.0"
+terminado = ">=0.8.3"
+tornado = ">=6.1"
+traitlets = ">=4.2.1"
+
+[package.extras]
+docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"]
+json-logging = ["json-logging"]
+test = ["pytest", "coverage", "requests", "testpath", "nbval", "pytest-playwright", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "requests-unixsocket"]
+
[[package]]
name = "nbclient"
-version = "0.5.10"
+version = "0.7.2"
description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor."
category = "dev"
optional = false
python-versions = ">=3.7.0"
[package.dependencies]
-jupyter-client = ">=6.1.5"
-nbformat = ">=5.0"
-nest-asyncio = "*"
-traitlets = ">=4.2"
+jupyter-client = ">=6.1.12"
+jupyter-core = ">=4.12,<5.0.0 || >=5.1.0"
+nbformat = ">=5.1"
+traitlets = ">=5.3"
[package.extras]
-sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"]
-test = ["ipython", "ipykernel", "ipywidgets (<8.0.0)", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "xmltodict", "black", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)"]
+dev = ["pre-commit"]
+docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient", "sphinx-book-theme", "sphinx (>=1.7)"]
+test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "pytest (>=7.0)", "testpath", "xmltodict"]
[[package]]
name = "nbconvert"
-version = "6.4.1"
+version = "7.2.10"
description = "Converting Jupyter Notebooks"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
+beautifulsoup4 = "*"
bleach = "*"
defusedxml = "*"
-entrypoints = ">=0.2.2"
-jinja2 = ">=2.4"
-jupyter-core = "*"
+importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""}
+jinja2 = ">=3.0"
+jupyter-core = ">=4.7"
jupyterlab-pygments = "*"
-mistune = ">=0.8.1,<2"
-nbclient = ">=0.5.0,<0.6.0"
-nbformat = ">=4.4"
+markupsafe = ">=2.0"
+mistune = ">=2.0.3,<3"
+nbclient = ">=0.5.0"
+nbformat = ">=5.1"
+packaging = "*"
pandocfilters = ">=1.4.1"
pygments = ">=2.4.1"
-testpath = "*"
+tinycss2 = "*"
traitlets = ">=5.0"
[package.extras]
-all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.6)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"]
-docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"]
-serve = ["tornado (>=4.0)"]
-test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.6)"]
-webpdf = ["pyppeteer (==0.2.6)"]
+all = ["nbconvert"]
+docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"]
+qtpdf = ["nbconvert"]
+qtpng = ["pyqtwebengine (>=5.15)"]
+serve = ["tornado (>=6.1)"]
+test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"]
+webpdf = ["pyppeteer (>=1,<1.1)"]
[[package]]
name = "nbformat"
-version = "5.1.3"
+version = "5.8.0"
description = "The Jupyter Notebook format"
category = "dev"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.7"
[package.dependencies]
-ipython-genutils = "*"
-jsonschema = ">=2.4,<2.5.0 || >2.5.0"
+fastjsonschema = "*"
+jsonschema = ">=2.6"
jupyter-core = "*"
-traitlets = ">=4.1"
+traitlets = ">=5.1"
[package.extras]
-fast = ["fastjsonschema"]
-test = ["check-manifest", "fastjsonschema", "testpath", "pytest", "pytest-cov"]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
+test = ["pep440", "pre-commit", "pytest", "testpath"]
[[package]]
name = "nest-asyncio"
-version = "1.5.4"
+version = "1.5.6"
description = "Patch asyncio to allow nested event loops"
category = "dev"
optional = false
@@ -701,11 +985,11 @@ python-versions = ">=3.5"
[[package]]
name = "notebook"
-version = "6.4.10"
+version = "6.5.3"
description = "A web-based notebook environment for interactive computing"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
argon2-cffi = "*"
@@ -714,6 +998,7 @@ ipython-genutils = "*"
jinja2 = "*"
jupyter-client = ">=5.3.4"
jupyter-core = ">=4.6.1"
+nbclassic = ">=0.4.7"
nbconvert = ">=5"
nbformat = "*"
nest-asyncio = ">=1.5"
@@ -727,18 +1012,61 @@ traitlets = ">=4.2.1"
[package.extras]
docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"]
json-logging = ["json-logging"]
-test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "requests-unixsocket"]
+test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "requests-unixsocket"]
[[package]]
-name = "packaging"
-version = "21.3"
-description = "Core utilities for Python packages"
+name = "notebook-shim"
+version = "0.2.2"
+description = "A shim layer for notebook traits and config"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+jupyter-server = ">=1.8,<3"
+
+[package.extras]
+test = ["pytest", "pytest-console-scripts", "pytest-tornasync"]
+
+[[package]]
+name = "openapi-schema-validator"
+version = "0.2.3"
+description = "OpenAPI schema validation for Python"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7.0,<4.0.0"
+
+[package.dependencies]
+jsonschema = ">=3.0.0,<5.0.0"
+
+[package.extras]
+rfc3339-validator = ["rfc3339-validator"]
+strict-rfc3339 = ["strict-rfc3339"]
+isodate = ["isodate"]
+
+[[package]]
+name = "openapi-spec-validator"
+version = "0.4.0"
+description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator"
+category = "main"
+optional = false
+python-versions = ">=3.7.0,<4.0.0"
[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
+jsonschema = ">=3.2.0,<5.0.0"
+openapi-schema-validator = ">=0.2.0,<0.3.0"
+PyYAML = ">=5.1"
+
+[package.extras]
+requests = ["requests"]
+
+[[package]]
+name = "packaging"
+version = "23.0"
+description = "Core utilities for Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
[[package]]
name = "pandocfilters"
@@ -796,6 +1124,26 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "pkgutil-resolve-name"
+version = "1.3.10"
+description = "Resolve a name to an object."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "platformdirs"
+version = "3.2.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.2)"]
+
[[package]]
name = "ply"
version = "3.11"
@@ -806,7 +1154,7 @@ python-versions = "*"
[[package]]
name = "prometheus-client"
-version = "0.13.1"
+version = "0.16.0"
description = "Python client for the Prometheus monitoring system."
category = "dev"
optional = false
@@ -817,15 +1165,26 @@ twisted = ["twisted"]
[[package]]
name = "prompt-toolkit"
-version = "3.0.26"
+version = "3.0.38"
description = "Library for building powerful interactive command lines in Python"
category = "dev"
optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7.0"
[package.dependencies]
wcwidth = "*"
+[[package]]
+name = "psutil"
+version = "5.9.4"
+description = "Cross-platform lib for process and system monitoring in Python."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.extras]
+test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"]
+
[[package]]
name = "ptyprocess"
version = "0.7.0"
@@ -835,12 +1194,15 @@ optional = false
python-versions = "*"
[[package]]
-name = "py"
-version = "1.11.0"
-description = "library with cross-python path, ini-parsing, io, code, log facilities"
+name = "pure-eval"
+version = "0.2.2"
+description = "Safely evaluate AST nodes without side effects"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = "*"
+
+[package.extras]
+tests = ["pytest"]
[[package]]
name = "pycparser"
@@ -852,42 +1214,34 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pygments"
-version = "2.11.2"
+version = "2.14.0"
description = "Pygments is a syntax highlighting package written in Python."
category = "dev"
optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "pyjwt"
-version = "2.3.0"
-description = "JSON Web Token implementation in Python"
-category = "main"
-optional = false
python-versions = ">=3.6"
[package.extras]
-crypto = ["cryptography (>=3.3.1)"]
-dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"]
-docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
-tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"]
+plugins = ["importlib-metadata"]
[[package]]
-name = "pyparsing"
-version = "3.0.7"
-description = "Python parsing module"
+name = "pyjwt"
+version = "2.6.0"
+description = "JSON Web Token implementation in Python"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
+crypto = ["cryptography (>=3.4.0)"]
+dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"]
+docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"]
[[package]]
name = "pyrsistent"
-version = "0.18.1"
+version = "0.19.3"
description = "Persistent/Functional/Immutable data structures"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.7"
@@ -902,6 +1256,14 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
[package.dependencies]
six = ">=1.5"
+[[package]]
+name = "python-json-logger"
+version = "2.0.7"
+description = "A python library adding a json log formatter"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
[[package]]
name = "pytz"
version = "2021.3"
@@ -912,7 +1274,7 @@ python-versions = "*"
[[package]]
name = "pywin32"
-version = "303"
+version = "306"
description = "Python for Window Extensions"
category = "dev"
optional = false
@@ -920,11 +1282,11 @@ python-versions = "*"
[[package]]
name = "pywinpty"
-version = "2.0.1"
+version = "2.0.10"
description = "Pseudo terminal support for Windows from Python."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[[package]]
name = "pyyaml"
@@ -936,7 +1298,7 @@ python-versions = ">=3.6"
[[package]]
name = "pyzmq"
-version = "22.3.0"
+version = "25.0.2"
description = "Python bindings for 0MQ"
category = "dev"
optional = false
@@ -944,25 +1306,25 @@ python-versions = ">=3.6"
[package.dependencies]
cffi = {version = "*", markers = "implementation_name == \"pypy\""}
-py = {version = "*", markers = "implementation_name == \"pypy\""}
[[package]]
name = "qtconsole"
-version = "5.2.2"
+version = "5.4.2"
description = "Jupyter Qt console"
category = "dev"
optional = false
-python-versions = ">= 3.6"
+python-versions = ">= 3.7"
[package.dependencies]
ipykernel = ">=4.1"
ipython-genutils = "*"
jupyter-client = ">=4.1"
jupyter-core = "*"
+packaging = "*"
pygments = "*"
pyzmq = ">=17.1"
-qtpy = "*"
-traitlets = "*"
+qtpy = ">=2.0.1"
+traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2"
[package.extras]
doc = ["Sphinx (>=1.3)"]
@@ -970,43 +1332,76 @@ test = ["flaky", "pytest", "pytest-qt"]
[[package]]
name = "qtpy"
-version = "2.0.0"
+version = "2.3.1"
description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
packaging = "*"
[package.extras]
-test = ["pytest (>=6.0.0,<7.0)", "pytest-cov (>=2.11.0)"]
+test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"]
[[package]]
name = "requests"
-version = "2.27.1"
+version = "2.28.2"
description = "Python HTTP for Humans."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=3.7, <4"
[package.dependencies]
certifi = ">=2017.4.17"
-charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
-idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "rfc3339-validator"
+version = "0.1.4"
+description = "A pure python RFC3339 validator"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "rfc3986"
+version = "1.5.0"
+description = "Validating URI References per RFC 3986"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "rfc3986-validator"
+version = "0.1.1"
+description = "Pure python rfc3986 validator"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "s3transfer"
-version = "0.5.0"
+version = "0.6.0"
description = "An Amazon S3 Transfer Manager"
category = "main"
optional = false
-python-versions = ">= 3.6"
+python-versions = ">= 3.7"
[package.dependencies]
botocore = ">=1.12.36,<2.0a.0"
@@ -1048,9 +1443,41 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+[[package]]
+name = "sniffio"
+version = "1.3.0"
+description = "Sniff out which async library your code is running under"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "soupsieve"
+version = "2.4"
+description = "A modern CSS selector implementation for Beautiful Soup."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "stack-data"
+version = "0.6.2"
+description = "Extract data from python stack frames and tracebacks for informative displays"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+asttokens = ">=2.1.0"
+executing = ">=1.2.0"
+pure-eval = "*"
+
+[package.extras]
+tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"]
+
[[package]]
name = "stripe"
-version = "2.65.0"
+version = "2.76.0"
description = "Python bindings for the Stripe API"
category = "main"
optional = false
@@ -1059,9 +1486,21 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
requests = {version = ">=2.20", markers = "python_version >= \"3.0\""}
+[[package]]
+name = "swaggyp"
+version = "0.3.0"
+description = "Python library for generating Swagger templates based on valley"
+category = "main"
+optional = false
+python-versions = ">=3.8,<4.0"
+
+[package.dependencies]
+PyYAML = ">=6.0,<7.0"
+valley = ">=1.5.6,<2.0.0"
+
[[package]]
name = "terminado"
-version = "0.13.1"
+version = "0.17.1"
description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library."
category = "dev"
optional = false
@@ -1070,65 +1509,82 @@ python-versions = ">=3.7"
[package.dependencies]
ptyprocess = {version = "*", markers = "os_name != \"nt\""}
pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""}
-tornado = ">=4"
+tornado = ">=6.1.0"
[package.extras]
-test = ["pytest"]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["pre-commit", "pytest-timeout", "pytest (>=7.0)"]
[[package]]
-name = "testpath"
-version = "0.5.0"
-description = "Test utilities for code working with files and commands"
+name = "tinycss2"
+version = "1.2.1"
+description = "A tiny CSS parser"
category = "dev"
optional = false
-python-versions = ">= 3.5"
+python-versions = ">=3.7"
+
+[package.dependencies]
+webencodings = ">=0.4"
[package.extras]
-test = ["pytest", "pathlib2"]
+doc = ["sphinx", "sphinx-rtd-theme"]
+test = ["pytest", "isort", "flake8"]
[[package]]
name = "tornado"
-version = "6.1"
+version = "6.2"
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
category = "dev"
optional = false
-python-versions = ">= 3.5"
+python-versions = ">= 3.7"
[[package]]
name = "traitlets"
-version = "5.1.1"
+version = "5.9.0"
description = "Traitlets Python configuration system"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
-test = ["pytest"]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"]
[[package]]
name = "typing-extensions"
-version = "4.0.1"
-description = "Backported and Experimental Type Hints for Python 3.6+"
-category = "main"
+version = "4.5.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "uri-template"
+version = "1.2.0"
+description = "RFC 6570 URI Template Processor"
+category = "dev"
optional = false
python-versions = ">=3.6"
+[package.extras]
+dev = ["mypy", "flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "pep8-naming"]
+
[[package]]
name = "urllib3"
-version = "1.26.8"
+version = "1.26.15"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.extras]
-brotli = ["brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "valley"
-version = "1.5.6"
+version = "1.5.8"
description = "Python extensible schema validations and declarative syntax helpers."
category = "main"
optional = false
@@ -1139,12 +1595,24 @@ envs = ">=1.3,<2.0"
[[package]]
name = "wcwidth"
-version = "0.2.5"
+version = "0.2.6"
description = "Measures the displayed width of unicode strings in a terminal"
category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "webcolors"
+version = "1.13"
+description = "A library for working with the color formats defined by HTML and CSS."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"]
+tests = ["pytest", "pytest-cov"]
+
[[package]]
name = "webencodings"
version = "0.5.1"
@@ -1153,795 +1621,185 @@ category = "main"
optional = false
python-versions = "*"
+[[package]]
+name = "websocket-client"
+version = "1.5.1"
+description = "WebSocket client for Python with low level API options"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
+optional = ["python-socks", "wsaccel"]
+test = ["websockets"]
+
[[package]]
name = "werkzeug"
-version = "2.0.2"
+version = "2.1.2"
description = "The comprehensive WSGI web application library."
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog"]
[[package]]
name = "widgetsnbextension"
-version = "3.5.2"
-description = "IPython HTML widgets for Jupyter"
+version = "4.0.7"
+description = "Jupyter interactive widgets for Jupyter Notebook"
category = "dev"
optional = false
-python-versions = "*"
-
-[package.dependencies]
-notebook = ">=4.4.1"
+python-versions = ">=3.7"
[[package]]
name = "zipp"
-version = "3.7.0"
+version = "3.15.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
+docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"]
[metadata]
lock-version = "1.1"
-python-versions = "^3.7"
-content-hash = "4e8046eb9b632ff1bbcc07c5141f30c51bc7d7ff11b8a22cc5a38b250d46afcd"
+python-versions = "^3.8"
+content-hash = "115514b1f1229bd8bf8ae3bbf89d647aea751f1d261ebe80e7beef93315170eb"
[metadata.files]
-appnope = [
- {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"},
- {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"},
-]
-argon2-cffi = [
- {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"},
- {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"},
-]
-argon2-cffi-bindings = [
- {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"},
- {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"},
- {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"},
- {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"},
- {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"},
- {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"},
- {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"},
- {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"},
- {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"},
- {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"},
- {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"},
- {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"},
- {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"},
-]
-astunparse = [
- {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"},
- {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"},
-]
-attrs = [
- {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
- {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
-]
-backcall = [
- {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
- {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
-]
-bleach = [
- {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"},
- {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"},
-]
-boto3 = [
- {file = "boto3-1.20.46-py3-none-any.whl", hash = "sha256:a2ffce001160d7e7c72a90c3084700d50eb64ea4a3aae8afe21566971d1fd611"},
- {file = "boto3-1.20.46.tar.gz", hash = "sha256:d7effba509d7298ef49316ba2da7a2ea115f2a7ff691f875f6354666663cf386"},
-]
-botocore = [
- {file = "botocore-1.23.46-py3-none-any.whl", hash = "sha256:354bce55e5adc8e2fe106acfd455ce448f9b920d7b697d06faa8cf200fd6566b"},
- {file = "botocore-1.23.46.tar.gz", hash = "sha256:38dd4564839f531725b667db360ba7df2125ceb3752b0ba12759c3e918015b95"},
-]
-cachetools = [
- {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"},
- {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"},
-]
-certifi = [
- {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
- {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
-]
-cffi = [
- {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
- {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"},
- {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"},
- {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"},
- {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"},
- {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"},
- {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"},
- {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"},
- {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"},
- {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"},
- {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"},
- {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"},
- {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"},
- {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"},
- {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"},
- {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"},
- {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"},
- {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"},
- {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"},
- {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"},
- {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"},
- {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"},
- {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"},
- {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"},
- {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"},
-]
-charset-normalizer = [
- {file = "charset-normalizer-2.0.10.tar.gz", hash = "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"},
- {file = "charset_normalizer-2.0.10-py3-none-any.whl", hash = "sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455"},
-]
-click = [
- {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"},
- {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"},
-]
-colorama = [
- {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
- {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
-]
-coverage = [
- {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"},
- {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"},
- {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"},
- {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"},
- {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"},
- {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"},
- {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"},
- {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"},
- {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"},
- {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"},
- {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"},
- {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"},
- {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"},
- {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"},
- {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"},
- {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"},
- {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"},
- {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"},
- {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"},
- {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"},
- {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"},
- {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"},
- {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"},
- {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"},
- {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"},
- {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"},
- {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"},
- {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"},
- {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"},
- {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"},
- {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"},
- {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"},
- {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"},
- {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"},
- {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"},
- {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"},
- {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"},
- {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"},
- {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"},
- {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"},
- {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"},
- {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"},
- {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"},
- {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"},
- {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"},
- {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"},
- {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"},
- {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
-]
-cryptography = [
- {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"},
- {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"},
- {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"},
- {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"},
- {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"},
- {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"},
- {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"},
- {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"},
- {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"},
- {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"},
- {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"},
- {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"},
- {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"},
- {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"},
- {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"},
- {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"},
- {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"},
- {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"},
- {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"},
-]
-debugpy = [
- {file = "debugpy-1.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:70b422c63a833630c33e3f9cdbd9b6971f8c5afd452697e464339a21bbe862ba"},
- {file = "debugpy-1.5.1-cp310-cp310-win32.whl", hash = "sha256:3a457ad9c0059a21a6c7d563c1f18e924f5cf90278c722bd50ede6f56b77c7fe"},
- {file = "debugpy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:5d76a4fd028d8009c3faf1185b4b78ceb2273dd2499447664b03939e0368bb90"},
- {file = "debugpy-1.5.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:16db27b4b91991442f91d73604d32080b30de655aca9ba821b1972ea8171021b"},
- {file = "debugpy-1.5.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b073ad5e8d8c488fbb6a116986858bab0c9c4558f28deb8832c7a5a27405bd6"},
- {file = "debugpy-1.5.1-cp36-cp36m-win32.whl", hash = "sha256:318f81f37341e4e054b4267d39896b73cddb3612ca13b39d7eea45af65165e1d"},
- {file = "debugpy-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b5b3157372e0e0a1297a8b6b5280bcf1d35a40f436c7973771c972726d1e32d5"},
- {file = "debugpy-1.5.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1ec3a086e14bba6c472632025b8fe5bdfbaef2afa1ebd5c6615ce6ed8d89bc67"},
- {file = "debugpy-1.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:26fbe53cca45a608679094791ce587b6e2798acd1d4777a8b303b07622e85182"},
- {file = "debugpy-1.5.1-cp37-cp37m-win32.whl", hash = "sha256:d876db8c312eeb02d85611e0f696abe66a2c1515e6405943609e725d5ff36f2a"},
- {file = "debugpy-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4404a62fb5332ea5c8c9132290eef50b3a0ba38cecacad5529e969a783bcbdd7"},
- {file = "debugpy-1.5.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f3a3dca9104aa14fd4210edcce6d9ce2b65bd9618c0b222135a40b9d6e2a9eeb"},
- {file = "debugpy-1.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2df2c373e85871086bd55271c929670cd4e1dba63e94a08d442db830646203b"},
- {file = "debugpy-1.5.1-cp38-cp38-win32.whl", hash = "sha256:82f5f9ce93af6861a0713f804e62ab390bb12a17f113153e47fea8bbb1dfbe36"},
- {file = "debugpy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:17a25ce9d7714f92fc97ef00cc06269d7c2b163094990ada30156ed31d9a5030"},
- {file = "debugpy-1.5.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:01e98c594b3e66d529e40edf314f849cd1a21f7a013298df58cd8e263bf8e184"},
- {file = "debugpy-1.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f73988422b17f071ad3c4383551ace1ba5ed810cbab5f9c362783d22d40a08dc"},
- {file = "debugpy-1.5.1-cp39-cp39-win32.whl", hash = "sha256:23df67fc56d59e386c342428a7953c2c06cc226d8525b11319153e96afb65b0c"},
- {file = "debugpy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:a2aa64f6d2ca7ded8a7e8a4e7cae3bc71866b09876b7b05cecad231779cb9156"},
- {file = "debugpy-1.5.1-py2.py3-none-any.whl", hash = "sha256:194f95dd3e84568b5489aab5689a3a2c044e8fdc06f1890b8b4f70b6b89f2778"},
- {file = "debugpy-1.5.1.zip", hash = "sha256:d2b09e91fbd1efa4f4fda121d49af89501beda50c18ed7499712c71a4bf3452e"},
-]
-decorator = [
- {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
- {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
-]
-defusedxml = [
- {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
- {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
-]
-entrypoints = [
- {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"},
- {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"},
-]
-envs = [
- {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"},
- {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"},
-]
-faunadb = [
- {file = "faunadb-4.1.1-py2.py3-none-any.whl", hash = "sha256:e197d356b783dfade08a1ffa7a4b32f2156c165c44d4e29b6605a97d38dfca02"},
-]
-future = [
- {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
-]
-graphql-py = [
- {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"},
-]
-h2 = [
- {file = "h2-2.6.2-py2.py3-none-any.whl", hash = "sha256:93cbd1013a2218539af05cdf9fc37b786655b93bbc94f5296b7dabd1c5cadf41"},
- {file = "h2-2.6.2.tar.gz", hash = "sha256:af35878673c83a44afbc12b13ac91a489da2819b5dc1e11768f3c2406f740fe9"},
-]
-hpack = [
- {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"},
- {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"},
-]
-hyper = [
- {file = "hyper-0.7.0-py2.py3-none-any.whl", hash = "sha256:069514f54231fb7b5df2fb910a114663a83306d5296f588fffcb0a9be19407fc"},
- {file = "hyper-0.7.0.tar.gz", hash = "sha256:12c82eacd122a659673484c1ea0d34576430afbe5aa6b8f63fe37fcb06a2458c"},
-]
-hyperframe = [
- {file = "hyperframe-3.2.0-py2.py3-none-any.whl", hash = "sha256:4dcab11967482d400853b396d042038e4c492a15a5d2f57259e2b5f89a32f755"},
- {file = "hyperframe-3.2.0.tar.gz", hash = "sha256:05f0e063e117c16fcdd13c12c93a4424a2c40668abfac3bb419a10f57698204e"},
-]
-idna = [
- {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
- {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
-]
-importlib-metadata = [
- {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"},
- {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"},
-]
-importlib-resources = [
- {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"},
- {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"},
-]
-ipykernel = [
- {file = "ipykernel-6.7.0-py3-none-any.whl", hash = "sha256:6203ccd5510ff148e9433fd4a2707c5ce8d688f026427f46e13d7ebf9b3e9787"},
- {file = "ipykernel-6.7.0.tar.gz", hash = "sha256:d82b904fdc2fd8c7b1fbe0fa481c68a11b4cd4c8ef07e6517da1f10cc3114d24"},
-]
-ipython = [
- {file = "ipython-7.31.1-py3-none-any.whl", hash = "sha256:55df3e0bd0f94e715abd968bedd89d4e8a7bce4bf498fb123fed4f5398fea874"},
- {file = "ipython-7.31.1.tar.gz", hash = "sha256:b5548ec5329a4bcf054a5deed5099b0f9622eb9ea51aaa7104d215fece201d8c"},
-]
-ipython-genutils = [
- {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"},
- {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"},
-]
-ipywidgets = [
- {file = "ipywidgets-7.6.5-py2.py3-none-any.whl", hash = "sha256:d258f582f915c62ea91023299603be095de19afb5ee271698f88327b9fe9bf43"},
- {file = "ipywidgets-7.6.5.tar.gz", hash = "sha256:00974f7cb4d5f8d494c19810fedb9fa9b64bffd3cda7c2be23c133a1ad3c99c5"},
-]
-iso8601 = [
- {file = "iso8601-1.0.2-py3-none-any.whl", hash = "sha256:d7bc01b1c2a43b259570bb307f057abc578786ea734ba2b87b836c5efc5bd443"},
- {file = "iso8601-1.0.2.tar.gz", hash = "sha256:27f503220e6845d9db954fb212b95b0362d8b7e6c1b2326a87061c3de93594b1"},
-]
-jedi = [
- {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"},
- {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"},
-]
-jinja2 = [
- {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"},
- {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"},
-]
-jmespath = [
- {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"},
- {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"},
-]
-jsonschema = [
- {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"},
- {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"},
-]
-jupyter = [
- {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"},
- {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"},
- {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"},
-]
-jupyter-client = [
- {file = "jupyter_client-7.1.2-py3-none-any.whl", hash = "sha256:d56f1c57bef42ff31e61b1185d3348a5b2bcde7c9a05523ae4dbe5ee0871797c"},
- {file = "jupyter_client-7.1.2.tar.gz", hash = "sha256:4ea61033726c8e579edb55626d8ee2e6bf0a83158ddf3751b8dd46b2c5cd1e96"},
-]
-jupyter-console = [
- {file = "jupyter_console-6.4.0-py3-none-any.whl", hash = "sha256:7799c4ea951e0e96ba8260575423cb323ea5a03fcf5503560fa3e15748869e27"},
- {file = "jupyter_console-6.4.0.tar.gz", hash = "sha256:242248e1685039cd8bff2c2ecb7ce6c1546eb50ee3b08519729e6e881aec19c7"},
-]
-jupyter-core = [
- {file = "jupyter_core-4.9.1-py3-none-any.whl", hash = "sha256:1c091f3bbefd6f2a8782f2c1db662ca8478ac240e962ae2c66f0b87c818154ea"},
- {file = "jupyter_core-4.9.1.tar.gz", hash = "sha256:dce8a7499da5a53ae3afd5a9f4b02e5df1d57250cf48f3ad79da23b4778cd6fa"},
-]
-jupyterlab-pygments = [
- {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"},
- {file = "jupyterlab_pygments-0.1.2.tar.gz", hash = "sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146"},
-]
-jupyterlab-widgets = [
- {file = "jupyterlab_widgets-1.0.2-py3-none-any.whl", hash = "sha256:f5d9efface8ec62941173ba1cffb2edd0ecddc801c11ae2931e30b50492eb8f7"},
- {file = "jupyterlab_widgets-1.0.2.tar.gz", hash = "sha256:7885092b2b96bf189c3a705cc3c412a4472ec5e8382d0b47219a66cccae73cfa"},
-]
-markupsafe = [
- {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"},
- {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"},
- {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"},
- {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"},
- {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"},
- {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
- {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
-]
-matplotlib-inline = [
- {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"},
- {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"},
-]
-mistune = [
- {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"},
- {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"},
-]
-nbclient = [
- {file = "nbclient-0.5.10-py3-none-any.whl", hash = "sha256:5b582e21c8b464e6676a9d60acc6871d7fbc3b080f74bef265a9f90411b31f6f"},
- {file = "nbclient-0.5.10.tar.gz", hash = "sha256:b5fdea88d6fa52ca38de6c2361401cfe7aaa7cd24c74effc5e489cec04d79088"},
-]
-nbconvert = [
- {file = "nbconvert-6.4.1-py3-none-any.whl", hash = "sha256:fe93bc42485c54c5a49a2324c834aca1ff315f320a535bed3e3c4e085d3eebe3"},
- {file = "nbconvert-6.4.1.tar.gz", hash = "sha256:7dce3f977c2f9651841a3c49b5b7314c742f24dd118b99e51b8eec13c504f555"},
-]
-nbformat = [
- {file = "nbformat-5.1.3-py3-none-any.whl", hash = "sha256:eb8447edd7127d043361bc17f2f5a807626bc8e878c7709a1c647abda28a9171"},
- {file = "nbformat-5.1.3.tar.gz", hash = "sha256:b516788ad70771c6250977c1374fcca6edebe6126fd2adb5a69aa5c2356fd1c8"},
-]
-nest-asyncio = [
- {file = "nest_asyncio-1.5.4-py3-none-any.whl", hash = "sha256:3fdd0d6061a2bb16f21fe8a9c6a7945be83521d81a0d15cff52e9edee50101d6"},
- {file = "nest_asyncio-1.5.4.tar.gz", hash = "sha256:f969f6013a16fadb4adcf09d11a68a4f617c6049d7af7ac2c676110169a63abd"},
-]
-notebook = [
- {file = "notebook-6.4.10-py3-none-any.whl", hash = "sha256:49cead814bff0945fcb2ee07579259418672ac175d3dc3d8102a4b0a656ed4df"},
- {file = "notebook-6.4.10.tar.gz", hash = "sha256:2408a76bc6289283a8eecfca67e298ec83c67db51a4c2e1b713dd180bb39e90e"},
-]
-packaging = [
- {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
- {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
-]
-pandocfilters = [
- {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"},
- {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"},
-]
-parso = [
- {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"},
- {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"},
-]
-pdoc = [
- {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"},
-]
-pexpect = [
- {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
- {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
-]
-pickleshare = [
- {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"},
- {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
-]
-ply = [
- {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
- {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
-]
-prometheus-client = [
- {file = "prometheus_client-0.13.1-py3-none-any.whl", hash = "sha256:357a447fd2359b0a1d2e9b311a0c5778c330cfbe186d880ad5a6b39884652316"},
- {file = "prometheus_client-0.13.1.tar.gz", hash = "sha256:ada41b891b79fca5638bd5cfe149efa86512eaa55987893becd2c6d8d0a5dfc5"},
-]
-prompt-toolkit = [
- {file = "prompt_toolkit-3.0.26-py3-none-any.whl", hash = "sha256:4bcf119be2200c17ed0d518872ef922f1de336eb6d1ddbd1e089ceb6447d97c6"},
- {file = "prompt_toolkit-3.0.26.tar.gz", hash = "sha256:a51d41a6a45fd9def54365bca8f0402c8f182f2b6f7e29c74d55faeb9fb38ac4"},
-]
-ptyprocess = [
- {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
- {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
-]
-py = [
- {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
- {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
-pycparser = [
- {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
- {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
-]
-pygments = [
- {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"},
- {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"},
-]
-pyjwt = [
- {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"},
- {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"},
-]
-pyparsing = [
- {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
- {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
-]
-pyrsistent = [
- {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"},
- {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"},
- {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"},
- {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"},
- {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"},
- {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"},
- {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"},
- {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"},
- {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"},
- {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"},
- {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"},
- {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"},
- {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"},
- {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"},
- {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"},
- {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"},
- {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"},
- {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"},
- {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"},
- {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"},
- {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"},
-]
-python-dateutil = [
- {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
- {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
-]
-pytz = [
- {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"},
- {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"},
-]
-pywin32 = [
- {file = "pywin32-303-cp310-cp310-win32.whl", hash = "sha256:6fed4af057039f309263fd3285d7b8042d41507343cd5fa781d98fcc5b90e8bb"},
- {file = "pywin32-303-cp310-cp310-win_amd64.whl", hash = "sha256:51cb52c5ec6709f96c3f26e7795b0bf169ee0d8395b2c1d7eb2c029a5008ed51"},
- {file = "pywin32-303-cp311-cp311-win32.whl", hash = "sha256:d9b5d87ca944eb3aa4cd45516203ead4b37ab06b8b777c54aedc35975dec0dee"},
- {file = "pywin32-303-cp311-cp311-win_amd64.whl", hash = "sha256:fcf44032f5b14fcda86028cdf49b6ebdaea091230eb0a757282aa656e4732439"},
- {file = "pywin32-303-cp36-cp36m-win32.whl", hash = "sha256:aad484d52ec58008ca36bd4ad14a71d7dd0a99db1a4ca71072213f63bf49c7d9"},
- {file = "pywin32-303-cp36-cp36m-win_amd64.whl", hash = "sha256:2a09632916b6bb231ba49983fe989f2f625cea237219530e81a69239cd0c4559"},
- {file = "pywin32-303-cp37-cp37m-win32.whl", hash = "sha256:b1675d82bcf6dbc96363fca747bac8bff6f6e4a447a4287ac652aa4b9adc796e"},
- {file = "pywin32-303-cp37-cp37m-win_amd64.whl", hash = "sha256:c268040769b48a13367221fced6d4232ed52f044ffafeda247bd9d2c6bdc29ca"},
- {file = "pywin32-303-cp38-cp38-win32.whl", hash = "sha256:5f9ec054f5a46a0f4dfd72af2ce1372f3d5a6e4052af20b858aa7df2df7d355b"},
- {file = "pywin32-303-cp38-cp38-win_amd64.whl", hash = "sha256:793bf74fce164bcffd9d57bb13c2c15d56e43c9542a7b9687b4fccf8f8a41aba"},
- {file = "pywin32-303-cp39-cp39-win32.whl", hash = "sha256:7d3271c98434617a11921c5ccf74615794d97b079e22ed7773790822735cc352"},
- {file = "pywin32-303-cp39-cp39-win_amd64.whl", hash = "sha256:79cbb862c11b9af19bcb682891c1b91942ec2ff7de8151e2aea2e175899cda34"},
-]
-pywinpty = [
- {file = "pywinpty-2.0.1-cp310-none-win_amd64.whl", hash = "sha256:ec7d4841c82980519f31d2c61b7f93db4b773a66fce489a8a72377045fe04c4b"},
- {file = "pywinpty-2.0.1-cp37-none-win_amd64.whl", hash = "sha256:29550aafda86962b3b68e3454c11e26c1b8cf646dfafec33a4325c8d70ab4f36"},
- {file = "pywinpty-2.0.1-cp38-none-win_amd64.whl", hash = "sha256:dfdbcd0407c157c2024b0ea91b855caae25510fcf6c4da21c075253f05991a3a"},
- {file = "pywinpty-2.0.1-cp39-none-win_amd64.whl", hash = "sha256:c7cd0b30da5edd3e0b967842baa2aef1d205d991aa63a13c05afdb95d0812e69"},
- {file = "pywinpty-2.0.1.tar.gz", hash = "sha256:14e7321c6d43743af0de175fca9f111c5cc8d0a9f7c608c9e1cc69ec0d6ac146"},
-]
-pyyaml = [
- {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
- {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
- {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
- {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
- {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
- {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
- {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
- {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
- {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
- {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
- {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
- {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
- {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
- {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
- {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
- {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
-]
-pyzmq = [
- {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:6b217b8f9dfb6628f74b94bdaf9f7408708cb02167d644edca33f38746ca12dd"},
- {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2841997a0d85b998cbafecb4183caf51fd19c4357075dfd33eb7efea57e4c149"},
- {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"},
- {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"},
- {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"},
- {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"},
- {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"},
- {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"},
- {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"},
- {file = "pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"},
- {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"},
- {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"},
- {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"},
- {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"},
- {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"},
- {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = "sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"},
- {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"},
- {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"},
- {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"},
- {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"},
- {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"},
- {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"},
- {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"},
- {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"},
- {file = "pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"},
- {file = "pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"},
- {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"},
- {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"},
- {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"},
- {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"},
- {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"},
- {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"},
- {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"},
- {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"},
- {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f43b4a2e6218371dd4f41e547bd919ceeb6ebf4abf31a7a0669cd11cd91ea973"},
- {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"},
- {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"},
- {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"},
- {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"},
- {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"},
- {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"},
- {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"},
- {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"},
- {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:80e043a89c6cadefd3a0712f8a1322038e819ebe9dbac7eca3bce1721bcb63bf"},
- {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1621e7a2af72cced1f6ec8ca8ca91d0f76ac236ab2e8828ac8fe909512d566cb"},
- {file = "pyzmq-22.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d6157793719de168b199194f6b6173f0ccd3bf3499e6870fac17086072e39115"},
- {file = "pyzmq-22.3.0.tar.gz", hash = "sha256:8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c"},
-]
-qtconsole = [
- {file = "qtconsole-5.2.2-py3-none-any.whl", hash = "sha256:4aa6a3e600e0c8cf16853f2378311bc2371f57cb0f22ecfc28994f4cf409ee2e"},
- {file = "qtconsole-5.2.2.tar.gz", hash = "sha256:8f9db97b27782184efd0a0f2d57ea3bd852d053747a2e442a9011329c082976d"},
-]
-qtpy = [
- {file = "QtPy-2.0.0-py3-none-any.whl", hash = "sha256:74bf26be3288aadc843cf3381d5ef0b82f11417ecdcbf26718a408f32590f1ac"},
- {file = "QtPy-2.0.0.tar.gz", hash = "sha256:777e333df4d711b2ec9743117ab319dadfbd743a5a0eee35923855ca3d35cd9d"},
-]
-requests = [
- {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
- {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
-]
-s3transfer = [
- {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"},
- {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"},
-]
-sammy = [
- {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"},
- {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"},
-]
-send2trash = [
- {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"},
- {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"},
-]
-six = [
- {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
- {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-stripe = [
- {file = "stripe-2.65.0-py2.py3-none-any.whl", hash = "sha256:16a8d1dfc0ba414b24612d31ede0f57ff260bccebd6dc18e17277cb24f58c6b7"},
- {file = "stripe-2.65.0.tar.gz", hash = "sha256:2e55d4d7262085de9cef2228f14581925c35350ba58a332352b1ec9e19a7b7a6"},
-]
-terminado = [
- {file = "terminado-0.13.1-py3-none-any.whl", hash = "sha256:f446b522b50a7aa68b5def0a02893978fb48cb82298b0ebdae13003c6ee6f198"},
- {file = "terminado-0.13.1.tar.gz", hash = "sha256:5b82b5c6e991f0705a76f961f43262a7fb1e55b093c16dca83f16384a7f39b7b"},
-]
-testpath = [
- {file = "testpath-0.5.0-py3-none-any.whl", hash = "sha256:8044f9a0bab6567fc644a3593164e872543bb44225b0e24846e2c89237937589"},
- {file = "testpath-0.5.0.tar.gz", hash = "sha256:1acf7a0bcd3004ae8357409fc33751e16d37ccc650921da1094a86581ad1e417"},
-]
-tornado = [
- {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"},
- {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"},
- {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"},
- {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"},
- {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"},
- {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"},
- {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"},
- {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"},
- {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"},
- {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"},
- {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"},
- {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"},
- {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"},
- {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"},
- {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"},
- {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"},
- {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"},
- {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"},
- {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"},
- {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"},
- {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"},
- {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"},
- {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"},
- {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"},
- {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"},
- {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"},
- {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"},
- {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"},
- {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"},
- {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"},
- {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"},
- {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"},
- {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"},
- {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"},
- {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"},
- {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"},
- {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"},
- {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"},
- {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"},
- {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"},
- {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"},
-]
-traitlets = [
- {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"},
- {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"},
-]
-typing-extensions = [
- {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"},
- {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"},
-]
-urllib3 = [
- {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"},
- {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"},
-]
-valley = [
- {file = "valley-1.5.6-py3-none-any.whl", hash = "sha256:fa2e5fc51d59901e5eb178116a4fb15b712928b4c87809f59cdf02a934d63cf6"},
- {file = "valley-1.5.6.tar.gz", hash = "sha256:ec55f7df3512f0dfa23c9f253b414a02491dea41a62230ed459a43cf02fee9a3"},
-]
-wcwidth = [
- {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
- {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
-]
-webencodings = [
- {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
- {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
-]
-werkzeug = [
- {file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"},
- {file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"},
-]
-widgetsnbextension = [
- {file = "widgetsnbextension-3.5.2-py2.py3-none-any.whl", hash = "sha256:763a9fdc836d141fa080005a886d63f66f73d56dba1fb5961afc239c77708569"},
- {file = "widgetsnbextension-3.5.2.tar.gz", hash = "sha256:e0731a60ba540cd19bbbefe771a9076dcd2dde90713a8f87f27f53f2d1db7727"},
-]
-zipp = [
- {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"},
- {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"},
-]
+anyio = []
+appnope = []
+argon2-cffi = []
+argon2-cffi-bindings = []
+arrow = []
+asttokens = []
+astunparse = []
+attrs = []
+backcall = []
+beautifulsoup4 = []
+bleach = []
+boto3 = []
+botocore = []
+cachetools = []
+certifi = []
+cffi = []
+charset-normalizer = []
+click = []
+colorama = []
+comm = []
+coverage = []
+cryptography = []
+debugpy = []
+decorator = []
+defusedxml = []
+envs = []
+executing = []
+fastjsonschema = []
+faunadb = []
+formy = []
+fqdn = []
+future = []
+graphql-py = []
+h11 = []
+h2 = []
+hpack = []
+httpcore = []
+httpx = []
+hyperframe = []
+idna = []
+importlib-metadata = []
+importlib-resources = []
+ipykernel = []
+ipython = []
+ipython-genutils = []
+ipywidgets = []
+iso8601 = []
+isoduration = []
+jedi = []
+jinja2 = []
+jmespath = []
+jsonpointer = []
+jsonschema = []
+jupyter = []
+jupyter-client = []
+jupyter-console = []
+jupyter-core = []
+jupyter-events = []
+jupyter-server = []
+jupyter-server-terminals = []
+jupyterlab-pygments = []
+jupyterlab-widgets = []
+markupsafe = []
+matplotlib-inline = []
+mistune = []
+nbclassic = []
+nbclient = []
+nbconvert = []
+nbformat = []
+nest-asyncio = []
+notebook = []
+notebook-shim = []
+openapi-schema-validator = []
+openapi-spec-validator = []
+packaging = []
+pandocfilters = []
+parso = []
+pdoc = []
+pexpect = []
+pickleshare = []
+pkgutil-resolve-name = []
+platformdirs = []
+ply = []
+prometheus-client = []
+prompt-toolkit = []
+psutil = []
+ptyprocess = []
+pure-eval = []
+pycparser = []
+pygments = []
+pyjwt = []
+pyrsistent = []
+python-dateutil = []
+python-json-logger = []
+pytz = []
+pywin32 = []
+pywinpty = []
+pyyaml = []
+pyzmq = []
+qtconsole = []
+qtpy = []
+requests = []
+rfc3339-validator = []
+rfc3986 = []
+rfc3986-validator = []
+s3transfer = []
+sammy = []
+send2trash = []
+six = []
+sniffio = []
+soupsieve = []
+stack-data = []
+stripe = []
+swaggyp = []
+terminado = []
+tinycss2 = []
+tornado = []
+traitlets = []
+typing-extensions = []
+uri-template = []
+urllib3 = []
+valley = []
+wcwidth = []
+webcolors = []
+webencodings = []
+websocket-client = []
+werkzeug = []
+widgetsnbextension = []
+zipp = []
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index dcab6a2..4594977 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,22 +1,18 @@
[tool.poetry]
name = "pfunk"
-version = "0.5.9"
+version = "0.5.10"
description = "A Python library created make building FaunaDB GraphQL schemas and authentication code easier."
authors = ["Brian Jinwright"]
license = "Apache-2.0"
[tool.poetry.dependencies]
-python = "^3.7"
+python = "^3.8"
faunadb = "^4.0.1"
-valley = "^1.5.6"
-jinja2 = "3.0.1"
-envs = "^1.3"
+valley = "1.5.8"
requests = "^2.23.0"
pytz = "^2021.1"
decorator = "^5.0.9"
-Werkzeug = "^2.0.1"
-pyjwt = "^2.1.0"
-pip = "^21.2.4"
+pip = "^23.0.1"
cryptography = "^3.4.7"
cachetools = "^4.2.2"
click = "^8.0.1"
@@ -25,6 +21,12 @@ graphql-py = "^0.8.1"
sammy = "^0.4.3"
stripe = "^2.61.0"
bleach = "^4.1.0"
+openapi-spec-validator = "^0.4.0"
+swaggyp = "^0.3.0"
+formy = "1.3.1"
+Jinja2 = "^3.1.2"
+Werkzeug = "2.1.2"
+PyJWT = "^2.6.0"
[tool.poetry.dev-dependencies]
jupyter = "^1.0.0"
@@ -35,4 +37,4 @@ pdoc = "^7.2.0"
requires = ["poetry>=0.12"]
[tool.poetry.scripts]
-pfunk = 'pfunk.cli:pfunk'
+pfunk = 'pfunk.cli:pfunk'
\ No newline at end of file