From 0f346b201a9e049bfe5e08199afb315ac7459b31 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 17 Jan 2022 15:04:39 +0800 Subject: [PATCH 001/214] Completed CRUD test of StripePackage --- pfunk/tests/test_web_stripe.py | 66 ++++++++++++++++++++-------------- 1 file changed, 39 insertions(+), 27 deletions(-) diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index e399bb3..e151ea8 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -16,14 +16,15 @@ def setUp(self) -> None: self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) - self.stripe_pkg = StripePackage.create( - stripe_id='100', price='10', description='unit testing...', name='unit test package') + self.stripe_pkg = StripePackage.create(group=self.group, + stripe_id='100', price='10', description='unit testing...', name='unit test package') # self.stripe_customer = StripeCustomer.create(user=self.user, customer_id='100', package=self.stripe_pkg) self.token, self.exp = User.api_login("test", "abc123") self.app = self.project.wsgi_app self.c = Client(self.app) - # self.user.add_permissions(self.group, [PermissionGroup(StripePackage, ['create', 'read', 'write', 'delete'])]) + self.user.add_permissions(self.group, [PermissionGroup( + StripePackage, ['create', 'read', 'write', 'delete'])]) def test_list_package(self): res = self.c.get('/stripepackage/list/', headers={ @@ -43,44 +44,55 @@ def test_get_package(self): res.json['data']['data']['name'], self.stripe_pkg.name) - # TODO: Fix `forbidden` error in stripe views def test_create_package(self): + self.assertNotIn("new stripe pkg", [ + pkg.name for pkg in StripePackage.all()]) res = self.c.post('/stripepackage/create/', json={ 'stripe_id': '123', - 'name': 'stripe_pkg', + 'name': 'new stripe pkg', 'price': 10.10, - 'description': 'a test package' + 'description': 'a test package', + 'group': self.group.ref.id() }, headers={ - "Authorization": self.token, - "Content-Type": "application/json" + "Authorization": self.token }) - print(res.json) + self.assertTrue(res.json['success']) + self.assertIn("new stripe pkg", [ + pkg.name for pkg in StripePackage.all()]) - # TODO: Fix `forbidden` error in stripe views def test_update_package(self): + self.assertNotIn("updated pkg", [ + pkg.name for pkg in StripePackage.all()]) + updated_name = 'updated pkg' res = self.c.put(f'/stripepackage/update/{self.stripe_pkg.ref.id()}/', - json={ - 'stripe_id': '123', - 'name': 'stripe_pkg', - 'price': 10.10, - 'description': 'a test package' - }, - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) + json={ + 'stripe_id': '123', + 'name': updated_name, + 'price': 10.10, + 'description': 'a test package' + }, + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) - print(res.json) + self.assertTrue(res.json['success']) + self.assertEqual( + res.json['data']['data']['name'], + updated_name) - # TODO: Fix `forbidden` error in stripe views def test_delete_package(self): res = self.c.delete(f'/stripepackage/delete/{self.stripe_pkg.ref.id()}/', - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) - print(res.json) \ No newline at end of file + self.assertTrue(res.json['success']) + self.assertNotIn( + self.stripe_pkg.ref.id(), + [pkg.ref.id() for 
pkg in StripePackage.all()] + ) From 5a6d5de277d829ae4c13a70cfd4df63e048893e2 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 19 Jan 2022 17:01:12 +0800 Subject: [PATCH 002/214] Finished stripe unittests --- pfunk/contrib/ecommerce/collections.py | 13 +++-- pfunk/tests/test_web_stripe.py | 79 +++++++++++++++++++++++++- 2 files changed, 85 insertions(+), 7 deletions(-) diff --git a/pfunk/contrib/ecommerce/collections.py b/pfunk/contrib/ecommerce/collections.py index 569d58e..b44f4a4 100644 --- a/pfunk/contrib/ecommerce/collections.py +++ b/pfunk/contrib/ecommerce/collections.py @@ -22,7 +22,10 @@ class StripePackage(Collection): fields and functions to match your system. Read and detail views are naturally public. Write operations - requires authentication from admin group. + requires authentication from admin group. While it grealty + depends on your app, it is recommended to have this only + modified by the admins and use `StripeCustomer` model to + attach a `stripe_id` to a model that is bound for payment. """ use_crud_views = False collection_roles = [GenericGroupBasedRole] @@ -47,11 +50,13 @@ class StripeCustomer(Collection): This is only a base model made to give an idea how can you structure your collections. Override the fields and functions to match your system. + + Should be used as the """ - collection_roles = [GenericUserBasedRole] user = ReferenceField(User) - customer_id = StringField(required=True) - package = ReferenceField(StripePackage) + collection_roles = [GenericUserBasedRole] + stripe_id = StringField(required=True, unique=True) + description = StringField() def __unicode__(self): return self.customer_id diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index e151ea8..8a3aac3 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -7,8 +7,7 @@ class TestWebStripe(APITestCase): - # TODO: Add `StripeCustomer` - collections = [User, Group, StripePackage] + collections = [User, Group, StripePackage, StripeCustomer] def setUp(self) -> None: super(TestWebStripe, self).setUp() @@ -18,7 +17,8 @@ def setUp(self) -> None: groups=[self.group]) self.stripe_pkg = StripePackage.create(group=self.group, stripe_id='100', price='10', description='unit testing...', name='unit test package') - # self.stripe_customer = StripeCustomer.create(user=self.user, customer_id='100', package=self.stripe_pkg) + self.stripe_cus = StripeCustomer.create( + user=self.user, stripe_id='100', description="information") self.token, self.exp = User.api_login("test", "abc123") self.app = self.project.wsgi_app @@ -96,3 +96,76 @@ def test_delete_package(self): self.stripe_pkg.ref.id(), [pkg.ref.id() for pkg in StripePackage.all()] ) + + def test_create_customer(self): + new_description = 'created description' + self.assertNotIn(new_description, [ + cus.description for cus in StripeCustomer.all()]) + res = self.c.post(f'/stripecustomer/create/', + json={ + "user": self.user.ref.id(), + "stripe_id": 201, + "description": new_description + }, + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.json['success']) + self.assertIn(new_description, [ + cus.description for cus in StripeCustomer.all()]) + + def test_list_customers(self): + res = self.c.get('/stripecustomer/list/', headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.json['success']) + self.assertEqual( + res.json['data']['data'][0]['data']['description'], + 'information') 
+ + def test_get_customer(self): + res = self.c.get(f'/stripecustomer/detail/{self.stripe_cus.ref.id()}/', headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.json['success']) + self.assertEqual( + res.json['data']['data']['description'], + 'information') + + def test_update_customer(self): + updated_description = 'an updated description' + self.assertNotIn(updated_description, [ + cus.description for cus in StripeCustomer.all()]) + res = self.c.put(f'/stripecustomer/update/{self.stripe_cus.ref.id()}/', + json={ + "description": updated_description + }, + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.json['success']) + self.assertEqual( + res.json['data']['data']['description'], + updated_description) + + def test_delete_customer(self): + updated_description = 'an updated description' + res = self.c.delete(f'/stripecustomer/delete/{self.stripe_cus.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.json['success']) + self.assertNotIn( + self.stripe_cus.ref.id(), + [cus.ref.id() for cus in StripeCustomer.all()] + ) From 593f526566a32b2723931158a0d333ea14170294 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 20 Jan 2022 16:56:10 +0800 Subject: [PATCH 003/214] added partial stripe webhook views --- pfunk/contrib/ecommerce/views.py | 5 ++- pfunk/tests/test_web_stripe.py | 70 +++++++++++++++++++++++++++++++- 2 files changed, 72 insertions(+), 3 deletions(-) diff --git a/pfunk/contrib/ecommerce/views.py b/pfunk/contrib/ecommerce/views.py index e48813e..6015644 100644 --- a/pfunk/contrib/ecommerce/views.py +++ b/pfunk/contrib/ecommerce/views.py @@ -15,6 +15,7 @@ from pfunk.contrib.auth.collections import Group, User from pfunk.web.views.base import ActionMixin + stripe.api_key = env('STRIPE_API_KEY') STRIPE_PUBLISHABLE_KEY = env('STRIPE_PUBLISHABLE_KEY') STRIPE_WEBHOOK_SECRET = env('STRIPE_WEBHOOK_SECRET') @@ -110,7 +111,7 @@ def event_action(self): if isinstance(action, collections.Callable): action() return {'success': 'ok'} - raise super().not_found_class() + raise NotImplementedError def post(self, request, *args, **kwargs): self.request = request @@ -137,7 +138,7 @@ def check_ip(self): except (KeyError, JSONDecodeError): return True try: - return self.request.META['REMOTE_ADDR'] in valid_ips + return self.request.source_ip in valid_ips except KeyError: return False diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index 8a3aac3..943adef 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -1,10 +1,13 @@ from werkzeug.test import Client +from types import SimpleNamespace +from unittest import mock from pfunk.tests import User, Group from pfunk.contrib.auth.collections import PermissionGroup from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer from pfunk.testcase import APITestCase - +from pfunk.contrib.ecommerce.views import BaseWebhookView +from pfunk.web.request import HTTPRequest class TestWebStripe(APITestCase): collections = [User, Group, StripePackage, StripeCustomer] @@ -169,3 +172,68 @@ def test_delete_customer(self): self.stripe_cus.ref.id(), [cus.ref.id() for cus in StripeCustomer.all()] ) + + +class TestStripeWebhook(APITestCase): + collections = [User, Group] + + def setUp(self) -> None: + super(TestStripeWebhook, self).setUp() + self.group = Group.create(name='Power Users', slug='power-users') + 
self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.token, self.exp = User.api_login("test", "abc123") + self.app = self.project.wsgi_app + self.view = BaseWebhookView() + stripe_req_body = { + "id": "evt_1CiPtv2eZvKYlo2CcUZsDcO6", + "object": "event", + "api_version": "2018-05-21", + "created": 1530291411, + "data": { + "object": {} + }, + "livemode": False, + "pending_webhooks": 0, + "request": { + "id": None, + "idempotency_key": None + }, + "type": "source.chargeable" + } + event = { + 'body': stripe_req_body, + 'requestContext': { + 'web': { + 'method': 'post', + 'path': '/webhook', + 'source_ip': '192.168.1.30' + } + } + } + self.view.request = HTTPRequest(event=event) + self.c = Client(self.app) + + def test_event_action(self): + # event_dict = {'type': 'checkout.session.completed'} + with self.assertRaises(NotImplementedError): + self.view.event = SimpleNamespace(**self.view.request.body) + res = self.view.event_action() + + def test_check_ip(self): + res = self.view.check_ip() + self.assertFalse(res) + + @mock.patch('boto3.client') + def test_send_html_email(self, mocked): + pass + + def test_check_signing_secret(self): + pass + + def test_get_transfer_data(self): + pass + + def test_receive_post_req(self): + pass \ No newline at end of file From d28632ab5b3f76b9bc9ff476550cb49516b3f31c Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 21 Jan 2022 16:14:29 +0800 Subject: [PATCH 004/214] Finished refactoring stripe views and unittests --- pfunk/contrib/ecommerce/collections.py | 8 +-- pfunk/contrib/ecommerce/views.py | 85 +++++++++++++++----------- pfunk/tests/test_web_stripe.py | 75 +++++++++++++++++++---- 3 files changed, 115 insertions(+), 53 deletions(-) diff --git a/pfunk/contrib/ecommerce/collections.py b/pfunk/contrib/ecommerce/collections.py index b44f4a4..a66e5f2 100644 --- a/pfunk/contrib/ecommerce/collections.py +++ b/pfunk/contrib/ecommerce/collections.py @@ -7,7 +7,7 @@ from pfunk.fields import EmailField, SlugField, ManyToManyField, ListField, ReferenceField, StringField, EnumField, FloatField from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, Public, UserRole from pfunk.contrib.ecommerce.resources import StripePublic -from pfunk.contrib.ecommerce.views import ListStripePackage, DetailStripePackage +from pfunk.contrib.ecommerce.views import BaseWebhookView, ListStripePackage, DetailStripePackage, CheckoutSuccessView from pfunk.web.views.json import CreateView, UpdateView, DeleteView @@ -29,7 +29,8 @@ class StripePackage(Collection): """ use_crud_views = False collection_roles = [GenericGroupBasedRole] - collection_views = [ListStripePackage, DetailStripePackage, CreateView, UpdateView, DeleteView] + collection_views = [ListStripePackage, DetailStripePackage, + CheckoutSuccessView, CreateView, UpdateView, DeleteView] stripe_id = StringField(required=True) name = StringField(required=True) price = FloatField(required=True) @@ -50,13 +51,12 @@ class StripeCustomer(Collection): This is only a base model made to give an idea how can you structure your collections. Override the fields and functions to match your system. 
- - Should be used as the """ user = ReferenceField(User) collection_roles = [GenericUserBasedRole] stripe_id = StringField(required=True, unique=True) description = StringField() + collection_views = [BaseWebhookView] def __unicode__(self): return self.customer_id diff --git a/pfunk/contrib/ecommerce/views.py b/pfunk/contrib/ecommerce/views.py index 6015644..a9ab3c6 100644 --- a/pfunk/contrib/ecommerce/views.py +++ b/pfunk/contrib/ecommerce/views.py @@ -6,6 +6,7 @@ from envs import env from datetime import datetime from json import JSONDecodeError +from werkzeug.routing import Rule from jinja2 import Environment, BaseLoader from pfunk.contrib.email import ses @@ -72,30 +73,51 @@ def get_context_data(self, **kwargs): return context -class CheckoutSuccessView(DetailView): +class CheckoutSuccessView(DetailView, ActionMixin): """ Defines action from the result of `CheckoutView` """ + action = 'checkout-success' + http_method_names = ['get'] - def get_object(self, queryset=None): + @classmethod + def url(cls, collection): + return Rule(f'/{collection.get_class_name()}/{cls.action}//', endpoint=cls.as_view(collection), + methods=cls.http_methods) + + def get_query(self, *args, **kwargs): """ Acquires the object from the `SessionView` """ - try: - session_id = self.request.GET['session_id'] - except KeyError: - raise DocNotFound + session_id = self.request.kwargs.get('id') self.stripe_session = stripe.checkout.Session.retrieve(session_id) - return self.model.objects.get(stripe_id=self.stripe_session.client_reference_id) - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context['stripe_session'] = self.stripe_session - return context + # NOTE: Chose listing instead of indexing under the assumption of limited paid packages. Override if needed + pkg = [pkg for pkg in self.collection.all() if pkg.stripe_id == + self.stripe_session.client_reference_id] + if pkg: + return pkg + raise DocNotFound -class BaseWebhookView(JSONView, ActionMixin): +class BaseWebhookView(CreateView, ActionMixin): """ Base class to use for executing Stripe webhook actions """ + login_required = False action = 'webhook' http_method_names = ['post'] webhook_signing_secret = STRIPE_WEBHOOK_SECRET + def get_query(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.event = self.check_signing_secret() + try: + self.event_json = json.loads(self.request.body) + except TypeError: + self.event_json = self.request.body + + try: + self.object = self.event.data.object + except AttributeError: + self.object = None + + return self.event_action() + def event_action(self): """ Transforms Stripe action to snake case for easier calling in child class @@ -107,26 +129,13 @@ def event_action(self): that """ event_type = self.event.type.replace('.', '_') - action = getattr(self, event_type, None) - if isinstance(action, collections.Callable): - action() - return {'success': 'ok'} + if event_type is str: + action = getattr(self, event_type, None) + if isinstance(action, collections.Callable): + action() + return {'success': 'ok'} raise NotImplementedError - def post(self, request, *args, **kwargs): - self.request = request - self.args = args - self.kwargs = kwargs - self.event = self.check_signing_secret() - self.event_json = json.loads(self.request.body) - - try: - self.object = self.event.data.object - except AttributeError: - self.object = None - - return self.event_action() - def check_ip(self): """ Makes sure the request is coming from a known Stripe address. 
@@ -151,8 +160,7 @@ def send_html_email(self, subject, from_email: str, to_email_list: list, templat DEFAULT_FROM_EMAIL (str): default `from` email """ if not context: - context = {'object': self.object, - 'request_body': self.request.body} + context = {'request_body': self.request.body} if template_name: rtemplate = Environment( loader=BaseLoader()).from_string(template_name) @@ -173,13 +181,15 @@ def send_html_email(self, subject, from_email: str, to_email_list: list, templat def check_signing_secret(self): """ - Make sure the request's Stripe signature to make sure it matches our signing secret. - :return: HttpResponse or Stripe Event Object + Make sure the request's Stripe signature to make sure it matches our signing secret + then returns the event + + :return: Stripe Event Object """ # If we are running tests we can't verify the signature but we need the event objects event = stripe.Webhook.construct_event( - self.request.body, self.request.META['HTTP_STRIPE_SIGNATURE'], self.webhook_signing_secret + self.request.body, self.request.headers['HTTP_STRIPE_SIGNATURE'], self.webhook_signing_secret ) return event @@ -188,8 +198,9 @@ def get_transfer_data(self): def checkout_session_completed(self): """ A method to override to implement custom actions - after successful Stripe checkout + after successful Stripe checkout. + This is a Stripe event. Use this method by subclassing this class in your custom claas """ diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index 943adef..923a4e1 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -1,3 +1,6 @@ +import json +from lib2to3.pytree import Base +import tempfile from werkzeug.test import Client from types import SimpleNamespace from unittest import mock @@ -9,11 +12,12 @@ from pfunk.contrib.ecommerce.views import BaseWebhookView from pfunk.web.request import HTTPRequest -class TestWebStripe(APITestCase): + +class TestWebStripeCrud(APITestCase): collections = [User, Group, StripePackage, StripeCustomer] def setUp(self) -> None: - super(TestWebStripe, self).setUp() + super(TestWebStripeCrud, self).setUp() self.group = Group.create(name='Power Users', slug='power-users') self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', @@ -175,7 +179,7 @@ def test_delete_customer(self): class TestStripeWebhook(APITestCase): - collections = [User, Group] + collections = [User, Group, StripeCustomer] def setUp(self) -> None: super(TestStripeWebhook, self).setUp() @@ -186,7 +190,7 @@ def setUp(self) -> None: self.token, self.exp = User.api_login("test", "abc123") self.app = self.project.wsgi_app self.view = BaseWebhookView() - stripe_req_body = { + self.stripe_req_body = { "id": "evt_1CiPtv2eZvKYlo2CcUZsDcO6", "object": "event", "api_version": "2018-05-21", @@ -202,15 +206,17 @@ def setUp(self) -> None: }, "type": "source.chargeable" } + headers = {'HTTP_STRIPE_SIGNATURE': 'sig_112233'} event = { - 'body': stripe_req_body, + 'body': self.stripe_req_body, 'requestContext': { 'web': { 'method': 'post', 'path': '/webhook', 'source_ip': '192.168.1.30' } - } + }, + 'headers': headers } self.view.request = HTTPRequest(event=event) self.c = Client(self.app) @@ -227,13 +233,58 @@ def test_check_ip(self): @mock.patch('boto3.client') def test_send_html_email(self, mocked): - pass + # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file + with tempfile.NamedTemporaryFile(prefix='/tmp/', suffix='.html') as 
tmp: + res = self.view.send_html_email( + subject='Test Subject', + from_email='unittesting@email.com', + to_email_list=['recipient@email.com'], + template_name=(tmp.name.split("/")[-1]) + ) + self.assertTrue(True) # if there are no exceptions, then it passed - def test_check_signing_secret(self): - pass + @mock.patch('stripe.Webhook') + def test_check_signing_secret(self, mocked): + res = self.view.check_signing_secret() + self.assertTrue(True) # if there are no exceptions, then it passed def test_get_transfer_data(self): - pass + self.view.event_json = self.view.request.body + res = self.view.get_transfer_data() + self.assertTrue(True) + + @mock.patch('stripe.Webhook') + def test_receive_post_req(self, mocked): + with self.assertRaises(NotImplementedError): + self.view.event = SimpleNamespace(**self.view.request.body) + res = self.c.post('/stripecustomer/webhook/', + json=self.stripe_req_body, + headers={ + 'HTTP_STRIPE_SIGNATURE': 'sig_1113' + }) - def test_receive_post_req(self): - pass \ No newline at end of file + +class TestStripeCheckoutView(APITestCase): + collections = [User, Group, StripePackage] + + def setUp(self) -> None: + super(TestStripeCheckoutView, self).setUp() + self.group = Group.create(name='Power Users', slug='power-users') + self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.token, self.exp = User.api_login("test", "abc123") + self.stripe_pkg = StripePackage.create(group=self.group, + stripe_id='100', price='10', description='unit testing...', name='unit test package') + self.app = self.project.wsgi_app + self.c = Client(self.app) + + @mock.patch('stripe.checkout', spec=True) + def test_checkout_success_view(self, mocked): + session_id = 'session_123' + res = self.c.get(f'/stripepackage/checkout-success/{session_id}/', headers={ + 'Authorization': self.token, + 'Content-Type': 'application/json' + }) + self.assertTrue(True) + self.assertDictEqual({'success': False, 'data': 'Not Found'}, res.json) From 52fe075fbcf56cc1cfe8dc55e7f8aef60d2a83aa Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 27 Jan 2022 17:12:49 +0800 Subject: [PATCH 005/214] added partially working swagger generation --- pfunk/project.py | 39 +++++++++++++++++++++++++++++++++++- pfunk/tests/test_project.py | 40 +++++++++++++++++++------------------ poetry.lock | 29 ++++++++++++++++----------- pyproject.toml | 1 + 4 files changed, 77 insertions(+), 32 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 0a384a7..35cb549 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -1,10 +1,11 @@ +from http.client import responses import logging import requests from io import BytesIO from envs import env - +import swaggyp as sw from faunadb.client import FaunaClient from jinja2 import Template from valley.contrib import Schema @@ -254,3 +255,39 @@ def wsgi_app(self, environ, start_response): start_response(status_str, response.wsgi_headers) return [str.encode(response.body)] + + def generate_swagger(self): + """ Gebnerates swagger doc """ + + rules = [GraphQLView.url()] + for i in self.collections: + col = i() + rules.extend(col.urls) + _map = Map( + rules=rules, + strict_slashes=True + ) + + paths = [] + for route in _map.iter_rules(): + rule = route.rule + methods = route.methods + args = route.arguments + + # TODO: Figure out a way to acquire response in collection view + rsp = sw.Response(status_code=200,description='test') + + # TODO: 
figure out a way to acquire endpoint summary and description + op = sw.Operation( + http_method=list(methods)[0], + summary='Test', + description='test', + responses=[rsp]) + p = sw.Path(endpoint=rule,operations=[op]) + paths.append(p) + + # print(operations) + + info = sw.Info(title='PFunk',description='Test site',version='dev') + t = sw.SwaggerTemplate(host='pfunk',basePath='/',info=info,paths=paths,schemes=['https']) + print(t.to_yaml()) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index db859a9..7065d88 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -8,23 +8,25 @@ class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() - def test_add_resource(self): + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + + def test_swagger(self): self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) - - + self.project.generate_swagger() \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index e68dafc..1980f79 100644 --- a/poetry.lock +++ b/poetry.lock @@ -210,14 +210,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "dist" -version = "1.0.3" -description = "Compute distance between two coordinates on the map" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "entrypoints" version = "0.3" @@ -1003,6 +995,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} +[[package]] +name = "swaggyp" +version = "0.1.0" +description = "Python library for generating Swagger templates based on valley " +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pyyaml = ">=3.12" +valley = ">=1.5.1" + [[package]] name = "terminado" version = "0.9.4" @@ -1137,7 +1141,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "6e3ebdfd969218d53ecbba8977b3f96609d7b731832acf00481ea6d4db67021a" +content-hash = "1d1b15d220f03648b1e636035cc1eed50d9db5c0b1845ad008400a4f8b2614a4" 
[metadata.files] appnope = [ @@ -1345,9 +1349,6 @@ defusedxml = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] -dist = [ - {file = "dist-1.0.3.zip", hash = "sha256:076020c825c45f69f988749fe06b0dd87590f44224bb08706086011e0c097950"}, -] entrypoints = [ {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, @@ -1735,6 +1736,10 @@ stripe = [ {file = "stripe-2.61.0-py2.py3-none-any.whl", hash = "sha256:5c6016362031a585d4f1138aff6e99c4c6d70b22b152b91fb6202d36bd8ac8c2"}, {file = "stripe-2.61.0.tar.gz", hash = "sha256:8131addd3512a22c4c539dda2d869a8f488e06f1b02d1f3a5f0f4848fc56184e"}, ] +swaggyp = [ + {file = "swaggyp-0.1.0-py2.py3-none-any.whl", hash = "sha256:4c95967632c22fc821e6c1ac69a2be2e37467629c436ccca7946ad073b445268"}, + {file = "swaggyp-0.1.0.tar.gz", hash = "sha256:f72fe0855a8ce4e968030ada1448d876e054bb7c8e444f1c16295b5943696f5f"}, +] terminado = [ {file = "terminado-0.9.4-py3-none-any.whl", hash = "sha256:daed77f9fad7b32558fa84b226a76f45a02242c20813502f36c4e1ade6d8f1ad"}, {file = "terminado-0.9.4.tar.gz", hash = "sha256:9a7dbcfbc2778830eeb70261bf7aa9d98a3eac8631a3afe3febeb57c12f798be"}, diff --git a/pyproject.toml b/pyproject.toml index bf4ff76..2908f1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" +swaggyp = "^0.1.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 17dcc3a01cbcaf21d209c1d507dc0b8ad9bb8c50 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 31 Jan 2022 13:30:11 +0800 Subject: [PATCH 006/214] Removed description field in StripeCustomer model. 
Fixed unittests --- pfunk/contrib/ecommerce/collections.py | 1 - pfunk/tests/test_web_stripe.py | 36 ++++++++++++-------------- 2 files changed, 17 insertions(+), 20 deletions(-) diff --git a/pfunk/contrib/ecommerce/collections.py b/pfunk/contrib/ecommerce/collections.py index a66e5f2..a507904 100644 --- a/pfunk/contrib/ecommerce/collections.py +++ b/pfunk/contrib/ecommerce/collections.py @@ -55,7 +55,6 @@ class StripeCustomer(Collection): user = ReferenceField(User) collection_roles = [GenericUserBasedRole] stripe_id = StringField(required=True, unique=True) - description = StringField() collection_views = [BaseWebhookView] def __unicode__(self): diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index 923a4e1..5d41a66 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -25,7 +25,7 @@ def setUp(self) -> None: self.stripe_pkg = StripePackage.create(group=self.group, stripe_id='100', price='10', description='unit testing...', name='unit test package') self.stripe_cus = StripeCustomer.create( - user=self.user, stripe_id='100', description="information") + user=self.user, stripe_id='100') self.token, self.exp = User.api_login("test", "abc123") self.app = self.project.wsgi_app @@ -105,14 +105,13 @@ def test_delete_package(self): ) def test_create_customer(self): - new_description = 'created description' - self.assertNotIn(new_description, [ - cus.description for cus in StripeCustomer.all()]) + stripe_id = '201' + self.assertNotIn(stripe_id, [ + cus.stripe_id for cus in StripeCustomer.all()]) res = self.c.post(f'/stripecustomer/create/', json={ "user": self.user.ref.id(), - "stripe_id": 201, - "description": new_description + "stripe_id": stripe_id }, headers={ "Authorization": self.token, @@ -120,8 +119,8 @@ def test_create_customer(self): }) self.assertTrue(res.json['success']) - self.assertIn(new_description, [ - cus.description for cus in StripeCustomer.all()]) + self.assertIn(stripe_id, [ + cus.stripe_id for cus in StripeCustomer.all()]) def test_list_customers(self): res = self.c.get('/stripecustomer/list/', headers={ @@ -131,8 +130,8 @@ def test_list_customers(self): self.assertTrue(res.json['success']) self.assertEqual( - res.json['data']['data'][0]['data']['description'], - 'information') + res.json['data']['data'][0]['data']['stripe_id'], + '100') def test_get_customer(self): res = self.c.get(f'/stripecustomer/detail/{self.stripe_cus.ref.id()}/', headers={ @@ -142,16 +141,16 @@ def test_get_customer(self): self.assertTrue(res.json['success']) self.assertEqual( - res.json['data']['data']['description'], - 'information') + res.json['data']['data']['stripe_id'], + '100') def test_update_customer(self): - updated_description = 'an updated description' - self.assertNotIn(updated_description, [ - cus.description for cus in StripeCustomer.all()]) + updated_stripe_id = '101' + self.assertNotIn(updated_stripe_id, [ + cus.stripe_id for cus in StripeCustomer.all()]) res = self.c.put(f'/stripecustomer/update/{self.stripe_cus.ref.id()}/', json={ - "description": updated_description + "stripe_id": updated_stripe_id }, headers={ "Authorization": self.token, @@ -160,11 +159,10 @@ def test_update_customer(self): self.assertTrue(res.json['success']) self.assertEqual( - res.json['data']['data']['description'], - updated_description) + res.json['data']['data']['stripe_id'], + updated_stripe_id) def test_delete_customer(self): - updated_description = 'an updated description' res = self.c.delete(f'/stripecustomer/delete/{self.stripe_cus.ref.id()}/', 
headers={ "Authorization": self.token, From 0ff6fa0677dfb881931bf5a868b5c51052ad2cca Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 27 Jan 2022 17:12:49 +0800 Subject: [PATCH 007/214] added partially working swagger generation --- pfunk/project.py | 39 +++++++++++++++++++++++++++++++++++- pfunk/tests/test_project.py | 40 +++++++++++++++++++------------------ poetry.lock | 29 ++++++++++++++++----------- pyproject.toml | 1 + 4 files changed, 77 insertions(+), 32 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 0a384a7..35cb549 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -1,10 +1,11 @@ +from http.client import responses import logging import requests from io import BytesIO from envs import env - +import swaggyp as sw from faunadb.client import FaunaClient from jinja2 import Template from valley.contrib import Schema @@ -254,3 +255,39 @@ def wsgi_app(self, environ, start_response): start_response(status_str, response.wsgi_headers) return [str.encode(response.body)] + + def generate_swagger(self): + """ Gebnerates swagger doc """ + + rules = [GraphQLView.url()] + for i in self.collections: + col = i() + rules.extend(col.urls) + _map = Map( + rules=rules, + strict_slashes=True + ) + + paths = [] + for route in _map.iter_rules(): + rule = route.rule + methods = route.methods + args = route.arguments + + # TODO: Figure out a way to acquire response in collection view + rsp = sw.Response(status_code=200,description='test') + + # TODO: figure out a way to acquire endpoint summary and description + op = sw.Operation( + http_method=list(methods)[0], + summary='Test', + description='test', + responses=[rsp]) + p = sw.Path(endpoint=rule,operations=[op]) + paths.append(p) + + # print(operations) + + info = sw.Info(title='PFunk',description='Test site',version='dev') + t = sw.SwaggerTemplate(host='pfunk',basePath='/',info=info,paths=paths,schemes=['https']) + print(t.to_yaml()) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index db859a9..7065d88 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -8,23 +8,25 @@ class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() - def test_add_resource(self): + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + + def test_swagger(self): self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' 
in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) - - + self.project.generate_swagger() \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index e68dafc..1980f79 100644 --- a/poetry.lock +++ b/poetry.lock @@ -210,14 +210,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "dist" -version = "1.0.3" -description = "Compute distance between two coordinates on the map" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "entrypoints" version = "0.3" @@ -1003,6 +995,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} +[[package]] +name = "swaggyp" +version = "0.1.0" +description = "Python library for generating Swagger templates based on valley " +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pyyaml = ">=3.12" +valley = ">=1.5.1" + [[package]] name = "terminado" version = "0.9.4" @@ -1137,7 +1141,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "6e3ebdfd969218d53ecbba8977b3f96609d7b731832acf00481ea6d4db67021a" +content-hash = "1d1b15d220f03648b1e636035cc1eed50d9db5c0b1845ad008400a4f8b2614a4" [metadata.files] appnope = [ @@ -1345,9 +1349,6 @@ defusedxml = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] -dist = [ - {file = "dist-1.0.3.zip", hash = "sha256:076020c825c45f69f988749fe06b0dd87590f44224bb08706086011e0c097950"}, -] entrypoints = [ {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, @@ -1735,6 +1736,10 @@ stripe = [ {file = "stripe-2.61.0-py2.py3-none-any.whl", hash = "sha256:5c6016362031a585d4f1138aff6e99c4c6d70b22b152b91fb6202d36bd8ac8c2"}, {file = "stripe-2.61.0.tar.gz", hash = "sha256:8131addd3512a22c4c539dda2d869a8f488e06f1b02d1f3a5f0f4848fc56184e"}, ] +swaggyp = [ + {file = "swaggyp-0.1.0-py2.py3-none-any.whl", hash = "sha256:4c95967632c22fc821e6c1ac69a2be2e37467629c436ccca7946ad073b445268"}, + {file = "swaggyp-0.1.0.tar.gz", hash = "sha256:f72fe0855a8ce4e968030ada1448d876e054bb7c8e444f1c16295b5943696f5f"}, +] terminado = [ {file = "terminado-0.9.4-py3-none-any.whl", hash = "sha256:daed77f9fad7b32558fa84b226a76f45a02242c20813502f36c4e1ade6d8f1ad"}, {file = "terminado-0.9.4.tar.gz", hash = "sha256:9a7dbcfbc2778830eeb70261bf7aa9d98a3eac8631a3afe3febeb57c12f798be"}, diff --git a/pyproject.toml b/pyproject.toml index bf4ff76..2908f1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" +swaggyp = "^0.1.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From b464eca146ea7f349e634674a43a77c39ed3cb25 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 2 Feb 2022 16:14:54 +0800 Subject: [PATCH 008/214] Reworked the acquisition of views in generate_swagger to acquire needed details in one loop --- 
pfunk/project.py | 55 +++++++++++++++++++++++++----------------------- 1 file changed, 29 insertions(+), 26 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 35cb549..1e7edc4 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -257,37 +257,40 @@ def wsgi_app(self, environ, start_response): return [str.encode(response.body)] def generate_swagger(self): - """ Gebnerates swagger doc """ - + """ Generates swagger doc """ + + paths = [] rules = [GraphQLView.url()] for i in self.collections: col = i() rules.extend(col.urls) - _map = Map( - rules=rules, - strict_slashes=True - ) - paths = [] - for route in _map.iter_rules(): - rule = route.rule - methods = route.methods - args = route.arguments - - # TODO: Figure out a way to acquire response in collection view - rsp = sw.Response(status_code=200,description='test') - - # TODO: figure out a way to acquire endpoint summary and description - op = sw.Operation( + for view in col.collection_views: + route = view.url(col) + rule = route.rule + methods = route.methods + args = route.arguments + + # TODO: Figure out a way to acquire response in collection view. + # NOTE: I thought of using the view's `get_query' function and do a mocked call to return a matching response + rsp = sw.Response(status_code=200, description='test') + + op = sw.Operation( http_method=list(methods)[0], - summary='Test', - description='test', + summary=f'({list(methods)[0]}) -> {col.__class__.__name__}', + description=view.__doc__, responses=[rsp]) - p = sw.Path(endpoint=rule,operations=[op]) - paths.append(p) - - # print(operations) - - info = sw.Info(title='PFunk',description='Test site',version='dev') - t = sw.SwaggerTemplate(host='pfunk',basePath='/',info=info,paths=paths,schemes=['https']) + p = sw.Path(endpoint=rule, operations=[op]) + paths.append(p) + + info = sw.Info( + title='PFunk', + description='Test site', + version='dev') + t = sw.SwaggerTemplate( + host='PFunk', + basePath='/', + info=info, + paths=paths, + schemes=['https']) print(t.to_yaml()) From f1fd103bfecb32c80c02b56bbdb58a024d9d6a8a Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Sun, 6 Feb 2022 21:37:09 -0500 Subject: [PATCH 009/214] updated to version 0.5.10 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index dcab6a2..98bcbde 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pfunk" -version = "0.5.9" +version = "0.5.10" description = "A Python library created make building FaunaDB GraphQL schemas and authentication code easier." authors = ["Brian Jinwright"] license = "Apache-2.0" From e315ddfdc87e9f38a631ea017c00f66a3555b90b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 7 Feb 2022 16:05:55 +0800 Subject: [PATCH 010/214] Added response handling in generate_swagger doc method --- pfunk/project.py | 42 ++++++++++++++++++++++++++++++++++++----- pfunk/web/views/base.py | 2 +- pfunk/web/views/json.py | 3 +++ 3 files changed, 41 insertions(+), 6 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 1e7edc4..c5fb153 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -257,7 +257,24 @@ def wsgi_app(self, environ, start_response): return [str.encode(response.body)] def generate_swagger(self): - """ Generates swagger doc """ + """ Generates swagger doc. 
Details are going to be acquired from the collections + + The acquisition of the information needed for docs are as follows: + Response: + Description (str): View's `get_query` docstrings + Status Code (int): + Acquired from `response_class` class variable of a view + Error status_codes are acquired too in class variables + Operation: + HTTP Methods (arr): Defined `http_methods` in a view. + Summary (str): ({http_method}) -> {collection_name} + Description (str): Docstring of the view + Path: + Endpoint (str): Path of the function. You can see it in `url` method of a view. + + Returns: + Generated YAML file + """ paths = [] rules = [GraphQLView.url()] @@ -271,15 +288,30 @@ def generate_swagger(self): methods = route.methods args = route.arguments - # TODO: Figure out a way to acquire response in collection view. - # NOTE: I thought of using the view's `get_query' function and do a mocked call to return a matching response - rsp = sw.Response(status_code=200, description='test') + rsp = sw.Response( + status_code=view.response_class.status_code, + description=view.get_query.__doc__) + not_found_rsp = sw.Response( + status_code=view.not_found_class.status_code, + description=view.not_found_class.default_payload) + bad_req_rsp = sw.Response( + status_code=view.bad_request_class.status_code, + description=view.bad_request_class.default_payload) + method_not_allowed_rsp = sw.Response( + status_code=view.method_not_allowed_class.status_code, + description=view.method_not_allowed_class.default_payload) + unauthorized_rsp = sw.Response( + status_code=view.unauthorized_class.status_code, + description=view.unauthorized_class.default_payload) + forbidden_rsp = sw.Response( + status_code=view.forbidden_class.status_code, + description=view.forbidden_class.default_payload) op = sw.Operation( http_method=list(methods)[0], summary=f'({list(methods)[0]}) -> {col.__class__.__name__}', description=view.__doc__, - responses=[rsp]) + responses=[rsp, not_found_rsp, bad_req_rsp, method_not_allowed_rsp, unauthorized_rsp, forbidden_rsp]) p = sw.Path(endpoint=rule, operations=[op]) paths.append(p) diff --git a/pfunk/web/views/base.py b/pfunk/web/views/base.py index 7963f7b..59a397e 100644 --- a/pfunk/web/views/base.py +++ b/pfunk/web/views/base.py @@ -332,7 +332,7 @@ class ObjectMixin(object): """ Generic GET mixin for a Fauna object. 
""" def get_query(self): - """ Acuires """ + """ Acuires the entity in a collection using by ID """ return self.collection.get(self.request.kwargs.get('id'), **self.get_query_kwargs()) def get_query_kwargs(self): diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 1459001..cfbe739 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -33,6 +33,7 @@ class CreateView(UpdateMixin, ActionMixin, JSONView): login_required = True def get_query(self): + """ Entity created in a collection """ obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) return obj @@ -68,6 +69,7 @@ class UpdateView(UpdateMixin, IDMixin, JSONView): login_required = True def get_query(self): + """ Entity in collection updated by an ID """ obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) obj._data.update(self.get_query_kwargs()) obj.save() @@ -88,6 +90,7 @@ class DeleteView(ObjectMixin, IDMixin, JSONView): login_required = True def get_query(self): + """ Deleted an entity in the specified collection """ return self.collection.delete_from_id(self.request.kwargs.get('id'), _token=self.request.token) From 8a0913cf62b02493a175d74b7b9a9c1dcd634efe Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 8 Feb 2022 15:58:40 +0800 Subject: [PATCH 011/214] Optimized code. Fixed API methods not valid in swagger yaml. --- pfunk/project.py | 58 ++++++++++++++++++++++++++---------------------- 1 file changed, 32 insertions(+), 26 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index c5fb153..33eb213 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -288,32 +288,38 @@ def generate_swagger(self): methods = route.methods args = route.arguments - rsp = sw.Response( - status_code=view.response_class.status_code, - description=view.get_query.__doc__) - not_found_rsp = sw.Response( - status_code=view.not_found_class.status_code, - description=view.not_found_class.default_payload) - bad_req_rsp = sw.Response( - status_code=view.bad_request_class.status_code, - description=view.bad_request_class.default_payload) - method_not_allowed_rsp = sw.Response( - status_code=view.method_not_allowed_class.status_code, - description=view.method_not_allowed_class.default_payload) - unauthorized_rsp = sw.Response( - status_code=view.unauthorized_class.status_code, - description=view.unauthorized_class.default_payload) - forbidden_rsp = sw.Response( - status_code=view.forbidden_class.status_code, - description=view.forbidden_class.default_payload) - - op = sw.Operation( - http_method=list(methods)[0], - summary=f'({list(methods)[0]}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=[rsp, not_found_rsp, bad_req_rsp, method_not_allowed_rsp, unauthorized_rsp, forbidden_rsp]) - p = sw.Path(endpoint=rule, operations=[op]) - paths.append(p) + responses = [] + response_classes = [ + 'response_class', + 'not_found_class', + 'bad_request_class', + 'method_not_allowed_class', + 'unauthorized_class', + 'forbidden_class' + ] + for rsp_cls in response_classes: + if rsp_cls == 'response_class': + responses.append( + sw.Response( + status_code=view.response_class.status_code, + description=view.get_query.__doc__) + ) + else: + responses.append( + sw.Response( + status_code=getattr(view, rsp_cls).status_code, + description=getattr(view, rsp_cls).default_payload) + ) + + view_methods = list(methods) + for method in view_methods: + op = sw.Operation( + http_method=method.lower(), + summary=f'({method}) -> {col.__class__.__name__}', 
+ description=view.__doc__, + responses=responses) + p = sw.Path(endpoint=rule, operations=[op]) + paths.append(p) info = sw.Info( title='PFunk', From 75455dc6a73a1c70f3fe081312369ffa3ba257bf Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 11 Feb 2022 16:50:59 +0800 Subject: [PATCH 012/214] Added acquisition of config info from json config file --- pfunk/project.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 33eb213..5d9199c 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -1,5 +1,7 @@ from http.client import responses import logging +import os +import json import requests from io import BytesIO @@ -275,6 +277,17 @@ def generate_swagger(self): Returns: Generated YAML file """ + if not os.path.exists(f'pfunk.json'): + raise Exception('Missing JSON Config file.') + else: + with open(f'pfunk.json', 'r') as f: + data = json.loads(f.read()) + proj_title = data.get('name') + proj_desc = data.get('description', 'A Pfunk project') + proj_ver = data.get('ver', '1.0') + host = data.get('host', 'pfunk.com') + basePath = data.get('basePath', '/') + schemes = ['https'] paths = [] rules = [GraphQLView.url()] @@ -322,13 +335,13 @@ def generate_swagger(self): paths.append(p) info = sw.Info( - title='PFunk', - description='Test site', - version='dev') + title=proj_title, + description=proj_desc, + version=proj_ver) t = sw.SwaggerTemplate( - host='PFunk', - basePath='/', + host=host, + basePath=basePath, info=info, paths=paths, - schemes=['https']) + schemes=schemes) print(t.to_yaml()) From de8cae646f4fb4dc80d9fd2ecf086f94012e4f95 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 11 Feb 2022 16:55:37 +0800 Subject: [PATCH 013/214] Added generation of yaml file. Configured unittests for project --- pfunk/project.py | 3 +++ pfunk/tests/test_project.py | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/pfunk/project.py b/pfunk/project.py index 5d9199c..9fb2b55 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -344,4 +344,7 @@ def generate_swagger(self): info=info, paths=paths, schemes=schemes) + + with open(f'swagger.yaml', 'x') as swag_doc: + swag_doc.write(t.to_yaml()) print(t.to_yaml()) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 7065d88..80b90af 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -29,4 +29,5 @@ def setUp(self) -> None: def test_swagger(self): self.project.add_resource(Person) - self.project.generate_swagger() \ No newline at end of file + self.project.generate_swagger() + self.assertTrue(True) # if there are no exceptions, then it passed \ No newline at end of file From 79f9514599e26d86e5a00e2dd5f21e5da3e4bb87 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 15 Feb 2022 15:10:11 +0800 Subject: [PATCH 014/214] Added additional CLI init args. Added checking of existing swagger yaml file. 
--- pfunk/cli.py | 6 +++++- pfunk/project.py | 13 ++++++++++--- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/pfunk/cli.py b/pfunk/cli.py index 40036ca..209b49c 100644 --- a/pfunk/cli.py +++ b/pfunk/cli.py @@ -27,9 +27,11 @@ def load_config_file(filename): @click.option('--email', prompt=True, help='Default From Email') @click.option('--bucket', prompt=True, help='S3 Bucket') @click.option('--fauna_key', prompt=True, help='Fauna Key') +@click.option('--host', prompt=True, help='Host') +@click.option('--description', prompt=True, help='Project Description') @click.option('--api_type', type=click.Choice(['web', 'rest', 'none']), prompt=True, help='API Type (web, rest, none)') @click.argument('name') -def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str): +def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str): """ Creates a PFunk project Args: @@ -50,6 +52,8 @@ def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stag json.dump({ 'name': name, 'api_type': api_type, + 'description': description, + 'host': host, 'stages': {stage_name: { 'key_module': f'{name}.{stage_name}_keys.KEYS', 'fauna_secret': fauna_key, diff --git a/pfunk/project.py b/pfunk/project.py index 9fb2b55..dbd7018 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -326,6 +326,9 @@ def generate_swagger(self): view_methods = list(methods) for method in view_methods: + if method == 'HEAD': + # Skip HEAD operations + continue op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', @@ -345,6 +348,10 @@ def generate_swagger(self): paths=paths, schemes=schemes) - with open(f'swagger.yaml', 'x') as swag_doc: - swag_doc.write(t.to_yaml()) - print(t.to_yaml()) + if not os.path.exists(f'swagger.yaml'): + with open(f'swagger.yaml', 'x') as swag_doc: + swag_doc.write(t.to_yaml()) + else: + print('There is an existing swagger file. Kindly move/delete it to generate a new one. 
Printing instead...') + print(t.to_yaml()) + return None From eced2e70b5184e1cdccdf1f82d2eb04b4b519329 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 15 Feb 2022 15:13:24 +0800 Subject: [PATCH 015/214] removed commented out debugs --- pfunk/tests/test_project.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 80b90af..61e71ee 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -8,24 +8,24 @@ class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resource(Person) From 7a1d6099b7f2bb19c657959bcadd0d71ba810846 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 24 Feb 2022 12:13:11 +0800 Subject: [PATCH 016/214] Added a method to acquire a Rule's arguments even if it is defined as a string --- pfunk/project.py | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index dbd7018..e175a1d 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -17,7 +17,7 @@ from werkzeug import Request as WerkzeugRequest from werkzeug.exceptions import NotFound, MethodNotAllowed from werkzeug.http import HTTP_STATUS_CODES -from werkzeug.routing import Map +from werkzeug.routing import Map, parse_rule from werkzeug.utils import cached_property from pfunk.web.request import HTTPRequest, RESTRequest, WSGIRequest @@ -300,6 +300,14 @@ def generate_swagger(self): rule = route.rule methods = route.methods args = route.arguments + + if args is None: + # if `defaults` weren't used in URL building, use the argument defined in the URL string + for converter, arguments, variable in parse_rule(rule): + if variable.startswith('/') or converter is None: + continue + args = variable + arg_type = converter responses = [] response_classes = [ @@ -329,11 +337,21 @@ def generate_swagger(self): if method == 'HEAD': # Skip HEAD operations continue + + # BUG: `Parameter` class 
can't be found on swaggyp module + params = sw.Parameter( + name=arg_type, + _in='path', + description='', + required=False, + allowEmptyValue=False + ) op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', description=view.__doc__, - responses=responses) + responses=responses, + parameters=[params]) p = sw.Path(endpoint=rule, operations=[op]) paths.append(p) @@ -353,5 +371,5 @@ def generate_swagger(self): swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') - print(t.to_yaml()) + # print(t.to_yaml()) return None From a0f4d133f6ea1952b387c1f609a9e4c8fcfcd390 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 2 Mar 2022 17:25:48 +0800 Subject: [PATCH 017/214] Added iteration through every collection to create definition for swagger documentation --- pfunk/project.py | 43 +++++++++++++++++++++++++------------ pfunk/tests/test_project.py | 32 +++++++++++++-------------- 2 files changed, 45 insertions(+), 30 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index e175a1d..d04de7c 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -2,16 +2,13 @@ import logging import os import json - import requests from io import BytesIO - from envs import env import swaggyp as sw from faunadb.client import FaunaClient from jinja2 import Template from valley.contrib import Schema - from valley.properties import CharProperty, ForeignProperty from valley.utils import import_util from werkzeug import Request as WerkzeugRequest @@ -290,6 +287,7 @@ def generate_swagger(self): schemes = ['https'] paths = [] + definitions = [] rules = [GraphQLView.url()] for i in self.collections: col = i() @@ -338,23 +336,39 @@ def generate_swagger(self): # Skip HEAD operations continue - # BUG: `Parameter` class can't be found on swaggyp module - params = sw.Parameter( - name=arg_type, - _in='path', - description='', - required=False, - allowEmptyValue=False - ) + # # BUG: `Parameter` class can't be found on swaggyp module + # params = sw.Parameter( + # name=arg_type, + # _in='path', + # description='', + # required=False, + # allowEmptyValue=False + # ) + # op = sw.Operation( + # http_method=method.lower(), + # summary=f'({method}) -> {col.__class__.__name__}', + # description=view.__doc__, + # responses=responses, + # parameters=[params]) op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', description=view.__doc__, - responses=responses, - parameters=[params]) + responses=responses) p = sw.Path(endpoint=rule, operations=[op]) paths.append(p) + + # Define model definitions by iterating through collection's fields for its properties + col_properties = {} + for property, field_type in col._base_properties.items(): + print(property) + col_properties[property] = {"type": field_type.GRAPHQL_FIELD_TYPE.lower()} + model_schema = sw.SwagSchema(properties=col_properties) + model = sw.Definition(name=col.name, schema=model_schema) + definitions.append(model) + + info = sw.Info( title=proj_title, description=proj_desc, @@ -364,7 +378,8 @@ def generate_swagger(self): basePath=basePath, info=info, paths=paths, - schemes=schemes) + schemes=schemes, + definitions=definitions) if not os.path.exists(f'swagger.yaml'): with open(f'swagger.yaml', 'x') as swag_doc: diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 61e71ee..80b90af 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -8,24 
+8,24 @@ class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resource(Person) From 883026669d26f8d0ae86ef3f0e7b0dc77a507a7a Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 2 Mar 2022 17:31:36 +0800 Subject: [PATCH 018/214] Added GRAPHQL_TO_YAML_TYPES dict for easy conversion between the two --- pfunk/project.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index d04de7c..3bb9056 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -42,6 +42,12 @@ ] } +GRAPHQL_TO_YAML_TYPES = { + "String": "string", + "Int": "integer", + "Float": "integer", + "Boolean": "boolean" +} class Project(Schema): """ @@ -362,8 +368,9 @@ def generate_swagger(self): # Define model definitions by iterating through collection's fields for its properties col_properties = {} for property, field_type in col._base_properties.items(): - print(property) - col_properties[property] = {"type": field_type.GRAPHQL_FIELD_TYPE.lower()} + # TODO: Figure out what to do on graphql type -> IDs (relations) + col_properties[property] = { + "type": GRAPHQL_TO_YAML_TYPES.get(field_type.GRAPHQL_FIELD_TYPE)} model_schema = sw.SwagSchema(properties=col_properties) model = sw.Definition(name=col.name, schema=model_schema) definitions.append(model) @@ -386,5 +393,5 @@ def generate_swagger(self): swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one. 
Printing instead...') - # print(t.to_yaml()) + print(t.to_yaml()) return None From ca9cfd02375c20eb0660e783ade704866ff96a61 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 4 Mar 2022 14:05:36 +0800 Subject: [PATCH 019/214] Fixed usage of Parameter class not having the _type field --- pfunk/project.py | 40 ++++++++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 3bb9056..65f812e 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -49,6 +49,14 @@ "Boolean": "boolean" } +WERKZEUG_URL_TO_YAML_TYPES = { + "int": "integer", + "string": "string", + "float": "integer", + "path": "string", + "uuid": "string" +} + class Project(Schema): """ Project configuration class. @@ -304,14 +312,18 @@ def generate_swagger(self): rule = route.rule methods = route.methods args = route.arguments + arg_type = None - if args is None: + if args is None or len(args) == 0: # if `defaults` weren't used in URL building, use the argument defined in the URL string for converter, arguments, variable in parse_rule(rule): if variable.startswith('/') or converter is None: continue args = variable arg_type = converter + print(f'PATH: {route}') + print(f'ARGS: {args}') + print(f'ARG_TYPE: {arg_type}\n') responses = [] response_classes = [ @@ -342,25 +354,21 @@ def generate_swagger(self): # Skip HEAD operations continue - # # BUG: `Parameter` class can't be found on swaggyp module - # params = sw.Parameter( - # name=arg_type, - # _in='path', - # description='', - # required=False, - # allowEmptyValue=False - # ) - # op = sw.Operation( - # http_method=method.lower(), - # summary=f'({method}) -> {col.__class__.__name__}', - # description=view.__doc__, - # responses=responses, - # parameters=[params]) + # BUG: `Parameter` class can't be found on swaggyp module + params = sw.Parameter( + name=args, + _type=WERKZEUG_URL_TO_YAML_TYPES[arg_type], + _in='path', + description='', + required=True, + allowEmptyValue=False + ) op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', description=view.__doc__, - responses=responses) + responses=responses, + parameters=[params]) p = sw.Path(endpoint=rule, operations=[op]) paths.append(p) From 3fb068082f2e8e0aadeb0bbca63ec19852516665 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 9 Mar 2022 18:30:21 +0800 Subject: [PATCH 020/214] Fixed Definition not return correct class name. 
Fixed _type in parameter not using safe-get --- pfunk/project.py | 42 ++++++++++++++++++++++++++---------------- poetry.lock | 13 ++++++++----- pyproject.toml | 2 +- 3 files changed, 35 insertions(+), 22 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 65f812e..dde6ea0 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -353,22 +353,31 @@ def generate_swagger(self): if method == 'HEAD': # Skip HEAD operations continue - + # BUG: `Parameter` class can't be found on swaggyp module - params = sw.Parameter( - name=args, - _type=WERKZEUG_URL_TO_YAML_TYPES[arg_type], - _in='path', - description='', - required=True, - allowEmptyValue=False - ) - op = sw.Operation( - http_method=method.lower(), - summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=responses, - parameters=[params]) + # BUG: Swagger semantic error, param name doesn't match the param in URL: + # it is formatted as `` (werkzeug) instead of `{id}` for swagger + if arg_type: + params = sw.Parameter( + name=args, + _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type), + _in='path', + description='', + required=True, + allowEmptyValue=False + ) + op = sw.Operation( + http_method=method.lower(), + summary=f'({method}) -> {col.__class__.__name__}', + description=view.__doc__, + responses=responses, + parameters=[params]) + else: + op = sw.Operation( + http_method=method.lower(), + summary=f'({method}) -> {col.__class__.__name__}', + description=view.__doc__, + responses=responses) p = sw.Path(endpoint=rule, operations=[op]) paths.append(p) @@ -380,7 +389,8 @@ def generate_swagger(self): col_properties[property] = { "type": GRAPHQL_TO_YAML_TYPES.get(field_type.GRAPHQL_FIELD_TYPE)} model_schema = sw.SwagSchema(properties=col_properties) - model = sw.Definition(name=col.name, schema=model_schema) + print(f'COLLECTION NAME: {type(col).__name__}') + model = sw.Definition(name=type(col).__name__, schema=model_schema) definitions.append(model) diff --git a/poetry.lock b/poetry.lock index 1980f79..e9f81fb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -998,14 +998,18 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "swaggyp" version = "0.1.0" -description = "Python library for generating Swagger templates based on valley " +description = "Python library for generating Swagger templates based on valley" category = "main" optional = false python-versions = "*" [package.dependencies] pyyaml = ">=3.12" -valley = ">=1.5.1" +valley = ">=1.5.2" + +[package.source] +type = "file" +url = ".ignore/dist/swaggyp-0.1.0-py2.py3-none-any.whl" [[package]] name = "terminado" @@ -1141,7 +1145,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "1d1b15d220f03648b1e636035cc1eed50d9db5c0b1845ad008400a4f8b2614a4" +content-hash = "d14555ad92eb971be6fe1a8f67dae4421d661f2674d8d389325cc05719bca19f" [metadata.files] appnope = [ @@ -1737,8 +1741,7 @@ stripe = [ {file = "stripe-2.61.0.tar.gz", hash = "sha256:8131addd3512a22c4c539dda2d869a8f488e06f1b02d1f3a5f0f4848fc56184e"}, ] swaggyp = [ - {file = "swaggyp-0.1.0-py2.py3-none-any.whl", hash = "sha256:4c95967632c22fc821e6c1ac69a2be2e37467629c436ccca7946ad073b445268"}, - {file = "swaggyp-0.1.0.tar.gz", hash = "sha256:f72fe0855a8ce4e968030ada1448d876e054bb7c8e444f1c16295b5943696f5f"}, + {file = "swaggyp-0.1.0-py2.py3-none-any.whl", hash = "sha256:1411a7688d613513874febbd754504e0a45e48c1a0985b34b0653d285dbbc43d"}, ] terminado = 
[ {file = "terminado-0.9.4-py3-none-any.whl", hash = "sha256:daed77f9fad7b32558fa84b226a76f45a02242c20813502f36c4e1ade6d8f1ad"}, diff --git a/pyproject.toml b/pyproject.toml index 8b1d8f6..87a92fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.1.0" +swaggyp = {path = ".ignore/dist/swaggyp-0.1.0-py2.py3-none-any.whl"} [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 1a7452bf5af9bc16c5834069f5d22a9ebe675b8f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 10 Mar 2022 16:48:58 +0800 Subject: [PATCH 021/214] Fixed url params to have a swagger-specific syntax. Added ability to reference a definition in reference fields --- pfunk/project.py | 60 ++++++++++++++++++++++++------------- pfunk/tests/test_project.py | 4 +-- 2 files changed, 41 insertions(+), 23 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index dde6ea0..2bccb01 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -1,6 +1,7 @@ from http.client import responses import logging import os +import re import json import requests from io import BytesIO @@ -49,6 +50,20 @@ "Boolean": "boolean" } +PFUNK_TO_YAML_TYPES = { + "StringField": "string", + "SlugField": "string", + "EmailField": "string", + "EnumField": "string", + "ManyToManyField": "#/definitions/", + "ReferenceField": "#/definitions/", + "ForeignList": "#/definitions/", + "IntegerField": "integer", + "FloatField": "integer", + "BooleanField": "boolean", + "ListField": "array" +} + WERKZEUG_URL_TO_YAML_TYPES = { "int": "integer", "string": "string", @@ -313,17 +328,6 @@ def generate_swagger(self): methods = route.methods args = route.arguments arg_type = None - - if args is None or len(args) == 0: - # if `defaults` weren't used in URL building, use the argument defined in the URL string - for converter, arguments, variable in parse_rule(rule): - if variable.startswith('/') or converter is None: - continue - args = variable - arg_type = converter - print(f'PATH: {route}') - print(f'ARGS: {args}') - print(f'ARG_TYPE: {arg_type}\n') responses = [] response_classes = [ @@ -339,7 +343,7 @@ def generate_swagger(self): responses.append( sw.Response( status_code=view.response_class.status_code, - description=view.get_query.__doc__) + description=view.get_query.__doc__ or 'Fill the docstrings to show description') ) else: responses.append( @@ -353,10 +357,18 @@ def generate_swagger(self): if method == 'HEAD': # Skip HEAD operations continue - + + if args is None or len(args) == 0: + # if `defaults` weren't used in URL building, use the argument defined in the URL string + for converter, arguments, variable in parse_rule(rule): + if variable.startswith('/') or converter is None: + continue + args = variable + arg_type = converter + # BUG: `Parameter` class can't be found on swaggyp module - # BUG: Swagger semantic error, param name doesn't match the param in URL: - # it is formatted as `` (werkzeug) instead of `{id}` for swagger + # Replace werkzeug params () to swagger-style params ({id}) + swagger_rule = re.sub('<\w+:\w+>', f'{{{args}}}', rule) if arg_type: params = sw.Parameter( name=args, @@ -378,22 +390,28 @@ def generate_swagger(self): summary=f'({method}) -> {col.__class__.__name__}', description=view.__doc__, responses=responses) - p = sw.Path(endpoint=rule, operations=[op]) + p = sw.Path(endpoint=swagger_rule, operations=[op]) paths.append(p) # Define model definitions by iterating through collection's fields for its properties 
col_properties = {} for property, field_type in col._base_properties.items(): - # TODO: Figure out what to do on graphql type -> IDs (relations) - col_properties[property] = { - "type": GRAPHQL_TO_YAML_TYPES.get(field_type.GRAPHQL_FIELD_TYPE)} + # Get pfunk field specifier + field_type_class = field_type.__class__.__name__ + + if field_type_class in ['ReferenceField', 'ManyToManyField']: + # Acquire the class that the collection is referencing to + foreign_class = field_type.get_foreign_class().__name__ + ref_field = PFUNK_TO_YAML_TYPES.get(field_type_class) + col_properties[property] = {"$ref": ref_field + foreign_class} + else: + col_properties[property] = { + "type": PFUNK_TO_YAML_TYPES.get(field_type_class)} model_schema = sw.SwagSchema(properties=col_properties) - print(f'COLLECTION NAME: {type(col).__name__}') model = sw.Definition(name=type(col).__name__, schema=model_schema) definitions.append(model) - info = sw.Info( title=proj_title, description=proj_desc, diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 80b90af..dfe0290 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -1,6 +1,6 @@ import unittest from pfunk.project import Project -from pfunk.tests import Person, Sport, GENDER_PRONOUN +from pfunk.tests import Person, Sport, GENDER_PRONOUN, Group, User class ProjectTestCase(unittest.TestCase): @@ -28,6 +28,6 @@ def setUp(self) -> None: # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): - self.project.add_resource(Person) + self.project.add_resources([Person, Sport, Group, User]) self.project.generate_swagger() self.assertTrue(True) # if there are no exceptions, then it passed \ No newline at end of file From 7044a79769cde8251802b146ffbe826558c4d693 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 10 Mar 2022 16:50:01 +0800 Subject: [PATCH 022/214] Cleaned using autopep8 --- pfunk/project.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/pfunk/project.py b/pfunk/project.py index 2bccb01..1a9857b 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -72,6 +72,7 @@ "uuid": "string" } + class Project(Schema): """ Project configuration class. 
@@ -342,22 +343,22 @@ def generate_swagger(self): if rsp_cls == 'response_class': responses.append( sw.Response( - status_code=view.response_class.status_code, + status_code=view.response_class.status_code, description=view.get_query.__doc__ or 'Fill the docstrings to show description') ) else: responses.append( sw.Response( - status_code=getattr(view, rsp_cls).status_code, + status_code=getattr(view, rsp_cls).status_code, description=getattr(view, rsp_cls).default_payload) ) view_methods = list(methods) for method in view_methods: if method == 'HEAD': - # Skip HEAD operations + # Skip HEAD operations continue - + if args is None or len(args) == 0: # if `defaults` weren't used in URL building, use the argument defined in the URL string for converter, arguments, variable in parse_rule(rule): @@ -384,7 +385,7 @@ def generate_swagger(self): description=view.__doc__, responses=responses, parameters=[params]) - else: + else: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', @@ -393,18 +394,18 @@ def generate_swagger(self): p = sw.Path(endpoint=swagger_rule, operations=[op]) paths.append(p) - # Define model definitions by iterating through collection's fields for its properties col_properties = {} for property, field_type in col._base_properties.items(): # Get pfunk field specifier - field_type_class = field_type.__class__.__name__ - + field_type_class = field_type.__class__.__name__ + if field_type_class in ['ReferenceField', 'ManyToManyField']: # Acquire the class that the collection is referencing to foreign_class = field_type.get_foreign_class().__name__ ref_field = PFUNK_TO_YAML_TYPES.get(field_type_class) - col_properties[property] = {"$ref": ref_field + foreign_class} + col_properties[property] = { + "$ref": ref_field + foreign_class} else: col_properties[property] = { "type": PFUNK_TO_YAML_TYPES.get(field_type_class)} @@ -428,6 +429,6 @@ def generate_swagger(self): with open(f'swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') + print('There is an existing swagger file. Kindly move/delete it to generate a new one. 
Printing instead...') print(t.to_yaml()) return None From 51db0fd8558a9a3060ae9ec9a58d976aae5003a5 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 17 Mar 2022 16:36:26 +0800 Subject: [PATCH 023/214] Moved entirety of swagger generator to a separate class --- pfunk/project.py | 158 +---- pfunk/utils/swagger.py | 199 +++++++ poetry.lock | 1245 +++++++++++++++++++++++----------------- pyproject.toml | 4 +- 4 files changed, 912 insertions(+), 694 deletions(-) create mode 100644 pfunk/utils/swagger.py diff --git a/pfunk/project.py b/pfunk/project.py index 1a9857b..90ff228 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -1,8 +1,4 @@ -from http.client import responses import logging -import os -import re -import json import requests from io import BytesIO from envs import env @@ -15,7 +11,7 @@ from werkzeug import Request as WerkzeugRequest from werkzeug.exceptions import NotFound, MethodNotAllowed from werkzeug.http import HTTP_STATUS_CODES -from werkzeug.routing import Map, parse_rule +from werkzeug.routing import Map from werkzeug.utils import cached_property from pfunk.web.request import HTTPRequest, RESTRequest, WSGIRequest @@ -25,6 +21,7 @@ from .template import graphql_template from .utils.publishing import BearerAuth from .web.views.graphql import GraphQLView +from .utils.swagger import SwaggerDoc logger = logging.getLogger('pfunk') @@ -286,149 +283,8 @@ def wsgi_app(self, environ, start_response): return [str.encode(response.body)] def generate_swagger(self): - """ Generates swagger doc. Details are going to be acquired from the collections - - The acquisition of the information needed for docs are as follows: - Response: - Description (str): View's `get_query` docstrings - Status Code (int): - Acquired from `response_class` class variable of a view - Error status_codes are acquired too in class variables - Operation: - HTTP Methods (arr): Defined `http_methods` in a view. - Summary (str): ({http_method}) -> {collection_name} - Description (str): Docstring of the view - Path: - Endpoint (str): Path of the function. You can see it in `url` method of a view. 
- - Returns: - Generated YAML file - """ - if not os.path.exists(f'pfunk.json'): - raise Exception('Missing JSON Config file.') - else: - with open(f'pfunk.json', 'r') as f: - data = json.loads(f.read()) - proj_title = data.get('name') - proj_desc = data.get('description', 'A Pfunk project') - proj_ver = data.get('ver', '1.0') - host = data.get('host', 'pfunk.com') - basePath = data.get('basePath', '/') - schemes = ['https'] - - paths = [] - definitions = [] - rules = [GraphQLView.url()] - for i in self.collections: - col = i() - rules.extend(col.urls) - - for view in col.collection_views: - route = view.url(col) - rule = route.rule - methods = route.methods - args = route.arguments - arg_type = None - - responses = [] - response_classes = [ - 'response_class', - 'not_found_class', - 'bad_request_class', - 'method_not_allowed_class', - 'unauthorized_class', - 'forbidden_class' - ] - for rsp_cls in response_classes: - if rsp_cls == 'response_class': - responses.append( - sw.Response( - status_code=view.response_class.status_code, - description=view.get_query.__doc__ or 'Fill the docstrings to show description') - ) - else: - responses.append( - sw.Response( - status_code=getattr(view, rsp_cls).status_code, - description=getattr(view, rsp_cls).default_payload) - ) - - view_methods = list(methods) - for method in view_methods: - if method == 'HEAD': - # Skip HEAD operations - continue - - if args is None or len(args) == 0: - # if `defaults` weren't used in URL building, use the argument defined in the URL string - for converter, arguments, variable in parse_rule(rule): - if variable.startswith('/') or converter is None: - continue - args = variable - arg_type = converter - - # BUG: `Parameter` class can't be found on swaggyp module - # Replace werkzeug params () to swagger-style params ({id}) - swagger_rule = re.sub('<\w+:\w+>', f'{{{args}}}', rule) - if arg_type: - params = sw.Parameter( - name=args, - _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type), - _in='path', - description='', - required=True, - allowEmptyValue=False - ) - op = sw.Operation( - http_method=method.lower(), - summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=responses, - parameters=[params]) - else: - op = sw.Operation( - http_method=method.lower(), - summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=responses) - p = sw.Path(endpoint=swagger_rule, operations=[op]) - paths.append(p) - - # Define model definitions by iterating through collection's fields for its properties - col_properties = {} - for property, field_type in col._base_properties.items(): - # Get pfunk field specifier - field_type_class = field_type.__class__.__name__ - - if field_type_class in ['ReferenceField', 'ManyToManyField']: - # Acquire the class that the collection is referencing to - foreign_class = field_type.get_foreign_class().__name__ - ref_field = PFUNK_TO_YAML_TYPES.get(field_type_class) - col_properties[property] = { - "$ref": ref_field + foreign_class} - else: - col_properties[property] = { - "type": PFUNK_TO_YAML_TYPES.get(field_type_class)} - model_schema = sw.SwagSchema(properties=col_properties) - model = sw.Definition(name=type(col).__name__, schema=model_schema) - definitions.append(model) - - info = sw.Info( - title=proj_title, - description=proj_desc, - version=proj_ver) - t = sw.SwaggerTemplate( - host=host, - basePath=basePath, - info=info, - paths=paths, - schemes=schemes, - definitions=definitions) - - if not os.path.exists(f'swagger.yaml'): - with 
open(f'swagger.yaml', 'x') as swag_doc: - swag_doc.write(t.to_yaml()) - else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') - print(t.to_yaml()) - return None + swag = SwaggerDoc( + collections=self.collections, + rules=[GraphQLView.url()]) + swag_file = swag.generate_swagger() + return swag_file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py new file mode 100644 index 0000000..2eb01f5 --- /dev/null +++ b/pfunk/utils/swagger.py @@ -0,0 +1,199 @@ +import os +import re +import json +import swaggyp as sw +from werkzeug.routing import Map, parse_rule + +from pfunk.collection import Collection + +GRAPHQL_TO_YAML_TYPES = { + "String": "string", + "Int": "integer", + "Float": "integer", + "Boolean": "boolean" +} + +PFUNK_TO_YAML_TYPES = { + "StringField": "string", + "SlugField": "string", + "EmailField": "string", + "EnumField": "string", + "ManyToManyField": "#/definitions/", + "ReferenceField": "#/definitions/", + "ForeignList": "#/definitions/", + "IntegerField": "integer", + "FloatField": "integer", + "BooleanField": "boolean", + "ListField": "array" +} + +WERKZEUG_URL_TO_YAML_TYPES = { + "int": "integer", + "string": "string", + "float": "integer", + "path": "string", + "uuid": "string" +} + + +class SwaggerDoc(object): + + def __init__(self, collections, rules=[]): + """ Generates swagger doc. Details are going to be acquired from the collections + + The acquisition of the information needed for docs are as follows: + Response: + Description (str): View's `get_query` docstrings + Status Code (int): + Acquired from `response_class` class variable of a view + Error status_codes are acquired too in class variables + Operation: + HTTP Methods (arr): Defined `http_methods` in a view. + Summary (str): ({http_method}) -> {collection_name} + Description (str): Docstring of the view + Path: + Endpoint (str): Path of the function. You can see it in `url` method of a view. + + Returns: + Generated YAML file + """ + self.collections = collections + self.rules = rules + self.paths = [] + self.definitions = [] + self.responses = [] + self._response_classes = [ + 'response_class', + 'not_found_class', + 'bad_request_class', + 'method_not_allowed_class', + 'unauthorized_class', + 'forbidden_class' + ] + + def _convert_url_to_swagger(self, replacement: str, to_replace: str) -> str: + return re.sub('<\w+:\w+>', f'{{{replacement}}}', to_replace) + + def write_to_yaml(self): + if not os.path.exists(f'pfunk.json'): + raise Exception('Missing JSON Config file.') + else: + with open(f'pfunk.json', 'r') as f: + data = json.loads(f.read()) + proj_title = data.get('name') + proj_desc = data.get('description', 'A Pfunk project') + proj_ver = data.get('ver', '1.0') + host = data.get('host', 'pfunk.com') + basePath = data.get('basePath', '/') + schemes = ['https'] + + info = sw.Info( + title=proj_title, + description=proj_desc, + version=proj_ver) + t = sw.SwaggerTemplate( + host=host, + basePath=basePath, + info=info, + paths=self.paths, + schemes=schemes, + definitions=self.definitions) + + if not os.path.exists(f'swagger.yaml'): + with open(f'swagger.yaml', 'x') as swag_doc: + swag_doc.write(t.to_yaml()) + else: + print('There is an existing swagger file. Kindly move/delete it to generate a new one. 
Printing instead...') + print(t.to_yaml()) + return t.to_yaml() + + def get_operations(self, col: Collection): + for view in col.collection_views: + route = view.url(col) + rule = route.rule + methods = route.methods + args = route.arguments + arg_type = None + responses = [] + for rsp_cls in self._response_classes: + if rsp_cls == 'response_class': + responses.append( + sw.Response( + status_code=view.response_class.status_code, + description=view.get_query.__doc__ or 'Fill the docstrings to show description') + ) + else: + responses.append( + sw.Response( + status_code=getattr(view, rsp_cls).status_code, + description=getattr(view, rsp_cls).default_payload) + ) + + view_methods = list(methods) + for method in view_methods: + if method == 'HEAD': + # Skip HEAD operations + continue + + if args is None or len(args) == 0: + # if `defaults` weren't used in URL building, use the argument defined in the URL string + for converter, arguments, variable in parse_rule(rule): + if variable.startswith('/') or converter is None: + continue + args = variable + arg_type = converter + + # Replace werkzeug params () to swagger-style params ({id}) + swagger_rule = self._convert_url_to_swagger(args, rule) + if arg_type: + params = sw.Parameter( + name=args, + _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type), + _in='path', + description='', + required=True, + allowEmptyValue=False + ) + op = sw.Operation( + http_method=method.lower(), + summary=f'({method}) -> {col.__class__.__name__}', + description=view.__doc__, + responses=responses, + parameters=[params]) + else: + op = sw.Operation( + http_method=method.lower(), + summary=f'({method}) -> {col.__class__.__name__}', + description=view.__doc__, + responses=responses) + p = sw.Path(endpoint=swagger_rule, operations=[op]) + self.paths.append(p) + return self.paths + + def get_model_definitions(self, col: Collection): + # Define model definitions by iterating through collection's fields for its properties + col_properties = {} + for property, field_type in col._base_properties.items(): + # Get pfunk field specifier + field_type_class = field_type.__class__.__name__ + + if field_type_class in ['ReferenceField', 'ManyToManyField']: + # Acquire the class that the collection is referencing to + foreign_class = field_type.get_foreign_class().__name__ + ref_field = PFUNK_TO_YAML_TYPES.get(field_type_class) + col_properties[property] = { + "$ref": ref_field + foreign_class} + else: + col_properties[property] = { + "type": PFUNK_TO_YAML_TYPES.get(field_type_class)} + model_schema = sw.SwagSchema(properties=col_properties) + model = sw.Definition(name=type(col).__name__, schema=model_schema) + self.definitions.append(model) + return self.definitions + + def generate_swagger(self): + for i in self.collections: + col = i() + self.get_operations(col) + self.get_model_definitions(col) + return self.write_to_yaml() \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index e9f81fb..52a4c7c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,20 +8,48 @@ python-versions = "*" [[package]] name = "argon2-cffi" -version = "20.1.0" +version = "21.3.0" description = "The secure Argon2 password hashing algorithm." 
category = "dev" optional = false +python-versions = ">=3.6" + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"] +docs = ["sphinx", "sphinx-notfound-page", "furo"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["pytest", "cogapp", "pre-commit", "wheel"] +tests = ["pytest"] + +[[package]] +name = "asttokens" +version = "2.0.5" +description = "Annotate AST trees with source code positions" +category = "dev" +optional = false python-versions = "*" [package.dependencies] -cffi = ">=1.0.0" six = "*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] -docs = ["sphinx"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] +test = ["astroid", "pytest"] [[package]] name = "astunparse" @@ -34,27 +62,19 @@ python-versions = "*" [package.dependencies] six = ">=1.6.1,<2.0" -[[package]] -name = "async-generator" -version = "1.10" -description = "Async generators and context managers for Python 3.5+" -category = "dev" -optional = false -python-versions = ">=3.5" - [[package]] name = "attrs" -version = "20.3.0" +version = "21.4.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "backcall" @@ -64,6 +84,21 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "beautifulsoup4" +version = "4.10.0" +description = "Screen-scraping library" +category = "dev" +optional = false +python-versions = ">3.0.0" + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + [[package]] name = "bleach" version = "4.1.0" @@ -79,14 +114,14 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.18.36" +version = "1.21.18" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.21.36,<1.22.0" +botocore = ">=1.24.18,<1.25.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -95,7 +130,7 @@ crt = ["botocore[crt] 
(>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.21.36" +version = "1.24.18" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -107,11 +142,11 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.11.24)"] +crt = ["awscrt (==0.12.5)"] [[package]] name = "cachetools" -version = "4.2.2" +version = "4.2.4" description = "Extensible memoizing collections and decorators" category = "main" optional = false @@ -119,7 +154,7 @@ python-versions = "~=3.5" [[package]] name = "certifi" -version = "2020.12.5" +version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -127,7 +162,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.5" +version = "1.15.0" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -137,16 +172,19 @@ python-versions = "*" pycparser = "*" [[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.1" +version = "8.0.4" description = "Composable command line interface toolkit" category = "main" optional = false @@ -154,7 +192,6 @@ python-versions = ">=3.6" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" @@ -177,7 +214,7 @@ toml = ["toml"] [[package]] name = "cryptography" -version = "3.4.7" +version = "3.4.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -194,9 +231,17 @@ sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +[[package]] +name = "debugpy" +version = "1.5.1" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + [[package]] name = "decorator" -version = "5.0.9" +version = "5.1.1" description = "Decorators for Humans" category = "main" optional = false @@ -212,26 +257,34 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "entrypoints" -version = "0.3" +version = "0.4" description = "Discover and load entry points from installed packages." category = "dev" optional = false -python-versions = ">=2.7" +python-versions = ">=3.6" [[package]] name = "envs" -version = "1.3" +version = "1.4" description = "Easy access of environment variables from Python with support for strings, booleans, list, tuples, and dicts." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6,<4.0" [package.extras] -cli = ["jinja2 (>=2.8)", "click (>=6.6)", "terminaltables (>=3.0.0)"] +cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] + +[[package]] +name = "executing" +version = "0.8.3" +description = "Get the currently executing AST node of a frame, and other information" +category = "dev" +optional = false +python-versions = "*" [[package]] name = "faunadb" -version = "4.1.0" +version = "4.2.0" description = "FaunaDB Python driver" category = "main" optional = false @@ -244,7 +297,6 @@ iso8601 = "*" requests = "*" [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] lint = ["pylint"] test = ["nose2", "nose2"] @@ -312,53 +364,55 @@ python-versions = "*" [[package]] name = "idna" -version = "2.10" +version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] -name = "importlib-metadata" -version = "4.0.1" -description = "Read metadata from Python packages" -category = "main" +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" +category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] name = "ipykernel" -version = "5.5.3" +version = "6.9.1" description = "IPython Kernel for Jupyter" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} -ipython = ">=5.0.0" -jupyter-client = "*" -tornado = ">=4.2" -traitlets = ">=4.1.0" +debugpy = ">=1.0.0,<2.0" +ipython = ">=7.23.1" +jupyter-client = "<8.0" +matplotlib-inline = ">=0.1.0,<0.2.0" +nest-asyncio = "*" +tornado = ">=4.2,<7.0" +traitlets = ">=5.1.0,<6.0" [package.extras] -test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose", "jedi (<=0.17.2)"] +test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "ipyparallel"] [[package]] name = "ipython" -version = "7.22.0" +version = "8.1.1" description = "IPython: Productive Interactive Computing" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] appnope = {version = "*", markers = "sys_platform == \"darwin\""} @@ -366,22 +420,26 @@ backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" jedi = ">=0.16" +matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" -pygments = "*" -traitlets = ">=4.2" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" [package.extras] -all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", 
"ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.16)", "pygments", "qtconsole", "requests", "testpath"] +all = ["black", "Sphinx (>=1.3)", "ipykernel", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.19)", "pandas", "pytest", "testpath", "trio", "pytest-asyncio"] +black = ["black"] doc = ["Sphinx (>=1.3)"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] -notebook = ["notebook", "ipywidgets"] +notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.16)"] +test = ["pytest", "pytest-asyncio", "testpath"] +test_extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "pytest", "testpath", "trio"] [[package]] name = "ipython-genutils" @@ -393,7 +451,7 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "7.6.3" +version = "7.6.5" description = "IPython HTML widgets for Jupyter" category = "dev" optional = false @@ -402,6 +460,7 @@ python-versions = "*" [package.dependencies] ipykernel = ">=4.5.1" ipython = {version = ">=4.0.0", markers = "python_version >= \"3.3\""} +ipython-genutils = ">=0.2.0,<0.3.0" jupyterlab-widgets = {version = ">=1.0.0", markers = "python_version >= \"3.6\""} nbformat = ">=4.2.0" traitlets = ">=4.3.1" @@ -412,15 +471,15 @@ test = ["pytest (>=3.6.0)", "pytest-cov", "mock"] [[package]] name = "iso8601" -version = "0.1.14" +version = "1.0.2" description = "Simple module to parse ISO 8601 dates" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.2,<4.0" [[package]] name = "jedi" -version = "0.18.0" +version = "0.18.1" description = "An autocompletion tool for Python that can be used for text editors." 
category = "dev" optional = false @@ -431,7 +490,7 @@ parso = ">=0.8.0,<0.9.0" [package.extras] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" @@ -457,21 +516,20 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "jsonschema" -version = "3.2.0" +version = "4.4.0" description = "An implementation of JSON Schema validation for Python" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] attrs = ">=17.4.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -pyrsistent = ">=0.14.0" -six = ">=1.11.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] name = "jupyter" @@ -491,13 +549,14 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "6.2.0" +version = "7.1.2" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false python-versions = ">=3.6.1" [package.dependencies] +entrypoints = "*" jupyter-core = ">=4.6.0" nest-asyncio = ">=1.5" python-dateutil = ">=2.1" @@ -506,12 +565,12 @@ tornado = ">=4.1" traitlets = "*" [package.extras] -doc = ["sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -test = ["async-generator", "ipykernel", "ipython", "mock", "pytest-asyncio", "pytest-timeout", "pytest", "mypy", "pre-commit", "jedi (<0.18)"] +doc = ["myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +test = ["codecov", "coverage", "ipykernel", "ipython", "mock", "mypy", "pre-commit", "pytest", "pytest-asyncio", "pytest-cov", "pytest-timeout", "jedi (<0.18)"] [[package]] name = "jupyter-console" -version = "6.4.0" +version = "6.4.3" description = "Jupyter terminal console" category = "dev" optional = false @@ -520,7 +579,7 @@ python-versions = ">=3.6" [package.dependencies] ipykernel = "*" ipython = "*" -jupyter-client = "*" +jupyter-client = ">=7.0.0" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = "*" @@ -529,14 +588,14 @@ test = ["pexpect"] [[package]] name = "jupyter-core" -version = "4.7.1" +version = "4.9.2" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\""} +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = "*" [[package]] @@ -552,7 +611,7 @@ pygments = ">=2.4.1,<3" [[package]] name = "jupyterlab-widgets" -version = "1.0.0" +version = "1.0.2" description = "A JupyterLab extension." 
category = "dev" optional = false @@ -560,11 +619,22 @@ python-versions = ">=3.6" [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.0" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" + +[[package]] +name = "matplotlib-inline" +version = "0.1.3" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +traitlets = "*" [[package]] name = "mistune" @@ -576,33 +646,32 @@ python-versions = "*" [[package]] name = "nbclient" -version = "0.5.3" +version = "0.5.13" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7.0" [package.dependencies] -async-generator = "*" jupyter-client = ">=6.1.5" nbformat = ">=5.0" nest-asyncio = "*" -traitlets = ">=4.2" +traitlets = ">=5.0.0" [package.extras] -dev = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] -test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] +test = ["ipython (<8.0.0)", "ipykernel", "ipywidgets (<8.0.0)", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "xmltodict", "black", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)"] [[package]] name = "nbconvert" -version = "6.0.7" +version = "6.4.4" description = "Converting Jupyter Notebooks" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] +beautifulsoup4 = "*" bleach = "*" defusedxml = "*" entrypoints = ">=0.2.2" @@ -615,36 +684,35 @@ nbformat = ">=4.4" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" testpath = "*" -traitlets = ">=4.2" +traitlets = ">=5.0" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (>=1,<1.1)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] serve = ["tornado (>=4.0)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)"] -webpdf = ["pyppeteer (==0.2.2)"] +test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (>=1,<1.1)"] +webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.1.3" +version = "5.2.0" description = "The Jupyter Notebook format" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] -ipython-genutils = "*" jsonschema = ">=2.4,<2.5.0 || >2.5.0" jupyter-core = "*" traitlets = ">=4.1" [package.extras] fast = 
["fastjsonschema"] -test = ["check-manifest", "fastjsonschema", "testpath", "pytest", "pytest-cov"] +test = ["check-manifest", "fastjsonschema", "testpath", "pytest"] [[package]] name = "nest-asyncio" -version = "1.5.1" +version = "1.5.4" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false @@ -652,7 +720,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.3.0" +version = "6.4.8" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -667,32 +735,33 @@ jupyter-client = ">=5.3.4" jupyter-core = ">=4.6.1" nbconvert = "*" nbformat = "*" +nest-asyncio = ">=1.5" prometheus-client = "*" pyzmq = ">=17" -Send2Trash = ">=1.5.0" +Send2Trash = ">=1.8.0" terminado = ">=0.8.3" tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme"] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] [[package]] name = "packaging" -version = "20.9" +version = "21.3" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pandocfilters" -version = "1.4.3" +version = "1.5.0" description = "Utilities for writing pandoc filters in python" category = "dev" optional = false @@ -700,7 +769,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "parso" -version = "0.8.2" +version = "0.8.3" description = "A Python Parser" category = "dev" optional = false @@ -712,7 +781,7 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pdoc" -version = "7.2.0" +version = "7.4.0" description = "API Documentation for Python Projects" category = "dev" optional = false @@ -756,22 +825,22 @@ python-versions = "*" [[package]] name = "prometheus-client" -version = "0.10.1" +version = "0.13.1" description = "Python client for the Prometheus monitoring system." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.extras] twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.18" +version = "3.0.28" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.6.2" [package.dependencies] wcwidth = "*" @@ -784,17 +853,28 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +tests = ["pytest"] + [[package]] name = "py" -version = "1.10.0" +version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycparser" -version = "2.20" +version = "2.21" description = "C parser in Python" category = "main" optional = false @@ -802,7 +882,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.8.1" +version = "2.11.2" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false @@ -810,37 +890,40 @@ python-versions = ">=3.5" [[package]] name = "pyjwt" -version = "2.1.0" +version = "2.3.0" description = "JSON Web Token implementation in Python" category = "main" optional = false python-versions = ">=3.6" [package.extras] -crypto = ["cryptography (>=3.3.1,<4.0.0)"] -dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1,<4.0.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] +crypto = ["cryptography (>=3.3.1)"] +dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.7" description = "Python parsing module" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" -version = "0.17.3" +version = "0.18.1" description = "Persistent/Functional/Immutable data structures" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -851,7 +934,7 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2021.1" +version = "2021.3" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -859,7 +942,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "300" +version = "303" description = "Python for Window Extensions" category = "dev" optional = false @@ -867,23 +950,23 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "0.5.7" -description = "Python bindings for the winpty library" +version = "2.0.5" +description = "Pseudo terminal support for Windows from Python." 
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] name = "pyyaml" -version = "5.4.1" +version = "6.0" description = "YAML parser and emitter for Python" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.6" [[package]] name = "pyzmq" -version = "22.0.3" +version = "22.3.0" description = "Python bindings for 0MQ" category = "dev" optional = false @@ -895,7 +978,7 @@ py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.0.3" +version = "5.2.2" description = "Jupyter Qt console" category = "dev" optional = false @@ -917,33 +1000,39 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "1.9.0" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5, PyQt4 and PySide) and additional custom QWidgets." +version = "2.0.1" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" + +[package.dependencies] +packaging = "*" + +[package.extras] +test = ["pytest (>=6.0.0)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "requests" -version = "2.25.1" +version = "2.27.1" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "s3transfer" -version = "0.5.0" +version = "0.5.2" description = "An Amazon S3 Transfer Manager" category = "main" optional = false @@ -970,23 +1059,52 @@ valley = ">=1.5.2" [[package]] name = "send2trash" -version = "1.5.0" +version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." category = "dev" optional = false python-versions = "*" +[package.extras] +nativelib = ["pyobjc-framework-cocoa", "pywin32"] +objc = ["pyobjc-framework-cocoa"] +win32 = ["pywin32"] + [[package]] name = "six" -version = "1.15.0" +version = "1.16.0" description = "Python 2 and 3 compatibility utilities" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "soupsieve" +version = "2.3.1" +description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "stack-data" +version = "0.2.0" +description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +asttokens = "*" +executing = "*" +pure-eval = "*" + +[package.extras] +tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] + [[package]] name = "stripe" -version = "2.61.0" +version = "2.67.0" description = "Python bindings for the Stripe API" category = "main" optional = false @@ -997,31 +1115,27 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "swaggyp" -version = "0.1.0" +version = "0.2.0" description = "Python library for generating Swagger templates based on valley" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.8,<4.0" [package.dependencies] -pyyaml = ">=3.12" -valley = ">=1.5.2" - -[package.source] -type = "file" -url = ".ignore/dist/swaggyp-0.1.0-py2.py3-none-any.whl" +PyYAML = ">=6.0,<7.0" +valley = ">=1.5.6,<2.0.0" [[package]] name = "terminado" -version = "0.9.4" +version = "0.13.3" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=0.5", markers = "os_name == \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} tornado = ">=4" [package.extras] @@ -1029,14 +1143,14 @@ test = ["pytest"] [[package]] name = "testpath" -version = "0.4.4" +version = "0.6.0" description = "Test utilities for code working with files and commands" category = "dev" optional = false -python-versions = "*" +python-versions = ">= 3.5" [package.extras] -test = ["pathlib2"] +test = ["pytest"] [[package]] name = "tornado" @@ -1048,38 +1162,27 @@ python-versions = ">= 3.5" [[package]] name = "traitlets" -version = "5.0.5" +version = "5.1.1" description = "Traitlets Python configuration system" category = "dev" optional = false python-versions = ">=3.7" -[package.dependencies] -ipython-genutils = "*" - [package.extras] test = ["pytest"] -[[package]] -name = "typing-extensions" -version = "3.7.4.3" -description = "Backported and Experimental Type Hints for Python 3.5+" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "urllib3" -version = "1.26.4" +version = "1.26.8" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] +brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -brotli = ["brotlipy (>=0.6.0)"] [[package]] name = "valley" @@ -1110,7 +1213,7 @@ python-versions = "*" [[package]] name = "werkzeug" -version = "2.0.1" +version = "2.0.3" description = "The comprehensive WSGI web application library." 
category = "main" optional = false @@ -1121,7 +1224,7 @@ watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "3.5.1" +version = "3.5.2" description = "IPython HTML widgets for Jupyter" category = "dev" optional = false @@ -1132,20 +1235,20 @@ notebook = ">=4.4.1" [[package]] name = "zipp" -version = "3.4.1" +version = "3.7.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" +category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] lock-version = "1.1" -python-versions = "^3.7" -content-hash = "d14555ad92eb971be6fe1a8f67dae4421d661f2674d8d389325cc05719bca19f" +python-versions = "^3.8" +content-hash = "45c80cdba27ede0a7a28b611355294d4166ccfd7d4209b7fb6b75582d854b5a7" [metadata.files] appnope = [ @@ -1153,123 +1256,131 @@ appnope = [ {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, ] argon2-cffi = [ - {file = "argon2-cffi-20.1.0.tar.gz", hash = "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:05a8ac07c7026542377e38389638a8a1e9b78f1cd8439cd7493b39f08dd75fbf"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-win32.whl", hash = "sha256:0bf066bc049332489bb2d75f69216416329d9dc65deee127152caeb16e5ce7d5"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-win_amd64.whl", hash = "sha256:57358570592c46c420300ec94f2ff3b32cbccd10d38bdc12dc6979c4a8484fbc"}, - {file = "argon2_cffi-20.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7d455c802727710e9dfa69b74ccaab04568386ca17b0ad36350b622cd34606fe"}, - {file = "argon2_cffi-20.1.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:b160416adc0f012fb1f12588a5e6954889510f82f698e23ed4f4fa57f12a0647"}, - {file = "argon2_cffi-20.1.0-cp35-cp35m-win32.whl", hash = "sha256:9bee3212ba4f560af397b6d7146848c32a800652301843df06b9e8f68f0f7361"}, - {file = "argon2_cffi-20.1.0-cp35-cp35m-win_amd64.whl", hash = "sha256:392c3c2ef91d12da510cfb6f9bae52512a4552573a9e27600bdb800e05905d2b"}, - {file = "argon2_cffi-20.1.0-cp36-cp36m-win32.whl", hash = "sha256:ba7209b608945b889457f949cc04c8e762bed4fe3fec88ae9a6b7765ae82e496"}, - {file = "argon2_cffi-20.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:da7f0445b71db6d3a72462e04f36544b0de871289b0bc8a7cc87c0f5ec7079fa"}, - {file = "argon2_cffi-20.1.0-cp37-abi3-macosx_10_6_intel.whl", hash = "sha256:cc0e028b209a5483b6846053d5fd7165f460a1f14774d79e632e75e7ae64b82b"}, - {file = "argon2_cffi-20.1.0-cp37-cp37m-win32.whl", hash = "sha256:18dee20e25e4be86680b178b35ccfc5d495ebd5792cd00781548d50880fee5c5"}, - {file = "argon2_cffi-20.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203"}, - {file = "argon2_cffi-20.1.0-cp38-cp38-win32.whl", hash = "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78"}, - 
{file = "argon2_cffi-20.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2"}, - {file = "argon2_cffi-20.1.0-cp39-cp39-win32.whl", hash = "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be"}, - {file = "argon2_cffi-20.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32"}, - {file = "argon2_cffi-20.1.0-pp36-pypy36_pp73-macosx_10_7_x86_64.whl", hash = "sha256:b94042e5dcaa5d08cf104a54bfae614be502c6f44c9c89ad1535b2ebdaacbd4c"}, - {file = "argon2_cffi-20.1.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:8282b84ceb46b5b75c3a882b28856b8cd7e647ac71995e71b6705ec06fc232c3"}, - {file = "argon2_cffi-20.1.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3aa804c0e52f208973845e8b10c70d8957c9e5a666f702793256242e9167c4e0"}, - {file = "argon2_cffi-20.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:36320372133a003374ef4275fbfce78b7ab581440dfca9f9471be3dd9a522428"}, + {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] +argon2-cffi-bindings = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] +asttokens = [ + {file = "asttokens-2.0.5-py2.py3-none-any.whl", hash = "sha256:0844691e88552595a6f4a4281a9f7f79b8dd45ca4ccea82e5e05b4bbdb76705c"}, + {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, ] astunparse = [ {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, ] -async-generator = [ - {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, - {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, -] attrs = [ - {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, - {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, + {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, +] bleach = [ {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, ] boto3 = [ - {file = "boto3-1.18.36-py3-none-any.whl", hash = 
"sha256:a7fccb61d95230322dd812629455df14167307c569077fa89d297eae73605ffb"}, - {file = "boto3-1.18.36.tar.gz", hash = "sha256:4df1085f5c24504a1b1a6584947f27b67c26eda123f29d3cecce9b2fd683e09b"}, + {file = "boto3-1.21.18-py3-none-any.whl", hash = "sha256:d857feb6af9932e1ee3a748060a2cd9fd6043dbbccf66976eda54586597efdc0"}, + {file = "boto3-1.21.18.tar.gz", hash = "sha256:8d6f3c548f0ee03d742f404c96515e7579fc6968135aaa50dd855a046698ff79"}, ] botocore = [ - {file = "botocore-1.21.36-py3-none-any.whl", hash = "sha256:e3e522fbe0bad1197aa7182451dc05f650310e77cf0a77749f6a5e82794c53de"}, - {file = "botocore-1.21.36.tar.gz", hash = "sha256:5b9a7d30e44b8a0a2bbbde62ae01bf6c349017e836985a0248552b00bbce7fae"}, + {file = "botocore-1.24.18-py3-none-any.whl", hash = "sha256:7ea8ef1ff7c4882ab59b337662f90ddf5ea860e95e7e209dca593a34ea585b1b"}, + {file = "botocore-1.24.18.tar.gz", hash = "sha256:d2da7ccbc5ddd61fe3cd45fcbd3de380d9e3a15bfa8fbfd2d9259a93dcc60c56"}, ] cachetools = [ - {file = "cachetools-4.2.2-py3-none-any.whl", hash = "sha256:2cc0b89715337ab6dbba85b5b50effe2b0c74e035d83ee8ed637cf52f12ae001"}, - {file = "cachetools-4.2.2.tar.gz", hash = "sha256:61b5ed1e22a0924aed1d23b478f37e8d52549ff8a961de2909c69bf950020cff"}, + {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, + {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, ] certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] cffi = [ - {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, - {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, - {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, - {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, - {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, - {file = 
"cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, - {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55"}, - {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, - {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, - {file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc"}, - {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, - {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, - {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373"}, - {file = 
"cffi-1.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76"}, - {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, - {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, - {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7"}, - {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, - {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, - {file = "cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, -] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = 
"cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = 
"cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, - {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, + {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, + {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = 
"sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -1330,39 +1441,71 @@ coverage = [ {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] cryptography = [ - {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, - {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, - {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, - {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b01fd6f2737816cb1e08ed4807ae194404790eac7ad030b34f2ce72b332f5586"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:bf40af59ca2465b24e54f671b2de2c59257ddc4f7e5706dbd6930e26823668d3"}, - {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, +] +debugpy = [ + {file = "debugpy-1.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:70b422c63a833630c33e3f9cdbd9b6971f8c5afd452697e464339a21bbe862ba"}, + {file = "debugpy-1.5.1-cp310-cp310-win32.whl", hash = "sha256:3a457ad9c0059a21a6c7d563c1f18e924f5cf90278c722bd50ede6f56b77c7fe"}, + {file = "debugpy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:5d76a4fd028d8009c3faf1185b4b78ceb2273dd2499447664b03939e0368bb90"}, + {file = "debugpy-1.5.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:16db27b4b91991442f91d73604d32080b30de655aca9ba821b1972ea8171021b"}, + {file = "debugpy-1.5.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b073ad5e8d8c488fbb6a116986858bab0c9c4558f28deb8832c7a5a27405bd6"}, + {file = "debugpy-1.5.1-cp36-cp36m-win32.whl", hash = "sha256:318f81f37341e4e054b4267d39896b73cddb3612ca13b39d7eea45af65165e1d"}, + {file = "debugpy-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b5b3157372e0e0a1297a8b6b5280bcf1d35a40f436c7973771c972726d1e32d5"}, + {file = "debugpy-1.5.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1ec3a086e14bba6c472632025b8fe5bdfbaef2afa1ebd5c6615ce6ed8d89bc67"}, + {file = "debugpy-1.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:26fbe53cca45a608679094791ce587b6e2798acd1d4777a8b303b07622e85182"}, + {file = "debugpy-1.5.1-cp37-cp37m-win32.whl", hash = "sha256:d876db8c312eeb02d85611e0f696abe66a2c1515e6405943609e725d5ff36f2a"}, + {file = "debugpy-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4404a62fb5332ea5c8c9132290eef50b3a0ba38cecacad5529e969a783bcbdd7"}, + {file = 
"debugpy-1.5.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f3a3dca9104aa14fd4210edcce6d9ce2b65bd9618c0b222135a40b9d6e2a9eeb"}, + {file = "debugpy-1.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2df2c373e85871086bd55271c929670cd4e1dba63e94a08d442db830646203b"}, + {file = "debugpy-1.5.1-cp38-cp38-win32.whl", hash = "sha256:82f5f9ce93af6861a0713f804e62ab390bb12a17f113153e47fea8bbb1dfbe36"}, + {file = "debugpy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:17a25ce9d7714f92fc97ef00cc06269d7c2b163094990ada30156ed31d9a5030"}, + {file = "debugpy-1.5.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:01e98c594b3e66d529e40edf314f849cd1a21f7a013298df58cd8e263bf8e184"}, + {file = "debugpy-1.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f73988422b17f071ad3c4383551ace1ba5ed810cbab5f9c362783d22d40a08dc"}, + {file = "debugpy-1.5.1-cp39-cp39-win32.whl", hash = "sha256:23df67fc56d59e386c342428a7953c2c06cc226d8525b11319153e96afb65b0c"}, + {file = "debugpy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:a2aa64f6d2ca7ded8a7e8a4e7cae3bc71866b09876b7b05cecad231779cb9156"}, + {file = "debugpy-1.5.1-py2.py3-none-any.whl", hash = "sha256:194f95dd3e84568b5489aab5689a3a2c044e8fdc06f1890b8b4f70b6b89f2778"}, + {file = "debugpy-1.5.1.zip", hash = "sha256:d2b09e91fbd1efa4f4fda121d49af89501beda50c18ed7499712c71a4bf3452e"}, ] decorator = [ - {file = "decorator-5.0.9-py3-none-any.whl", hash = "sha256:6e5c199c16f7a9f0e3a61a4a54b3d27e7dad0dbdde92b944426cb20914376323"}, - {file = "decorator-5.0.9.tar.gz", hash = "sha256:72ecfba4320a893c53f9706bebb2d55c270c1e51a28789361aa93e4a21319ed5"}, + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] defusedxml = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] entrypoints = [ - {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, - {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, ] envs = [ - {file = "envs-1.3-py2.py3-none-any.whl", hash = "sha256:cb771a231baafe920f2413c4e665c7394f475b5c4f1ef2388fba00e4c67817cc"}, - {file = "envs-1.3.tar.gz", hash = "sha256:ccf5cd85ddb8ed335e39ed8a22e0d23658f5a6d7da430f225e6f750c6f50ae42"}, + {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, + {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, +] +executing = [ + {file = "executing-0.8.3-py2.py3-none-any.whl", hash = "sha256:d1eef132db1b83649a3905ca6dd8897f71ac6f8cac79a7e58a1a09cf137546c9"}, + {file = "executing-0.8.3.tar.gz", hash = "sha256:c6554e21c6b060590a6d3be4b82fb78f8f0194d809de5ea7df1c093763311501"}, ] faunadb = [ - {file = 
"faunadb-4.1.0-py2.py3-none-any.whl", hash = "sha256:0a048838ef44f5cee653aadbc4302e48609d01dcaa795341c7ce049991a547f1"}, + {file = "faunadb-4.2.0-py2.py3-none-any.whl", hash = "sha256:73d5f560bddb7fc45f9201d526f97fcd0a7b0ef74ce3d4d46b8e116f4ce6e219"}, ] future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, @@ -1387,36 +1530,36 @@ hyperframe = [ {file = "hyperframe-3.2.0.tar.gz", hash = "sha256:05f0e063e117c16fcdd13c12c93a4424a2c40668abfac3bb419a10f57698204e"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -importlib-metadata = [ - {file = "importlib_metadata-4.0.1-py3-none-any.whl", hash = "sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d"}, - {file = "importlib_metadata-4.0.1.tar.gz", hash = "sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581"}, +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, ] ipykernel = [ - {file = "ipykernel-5.5.3-py3-none-any.whl", hash = "sha256:21abd584543759e49010975a4621603b3cf871b1039cb3879a14094717692614"}, - {file = "ipykernel-5.5.3.tar.gz", hash = "sha256:a682e4f7affd86d9ce9b699d21bcab6d5ec9fbb2bfcb194f2706973b252bc509"}, + {file = "ipykernel-6.9.1-py3-none-any.whl", hash = "sha256:4fae9df6e192837552b2406a6052d707046dd2e153860be73c68484bacba18ed"}, + {file = "ipykernel-6.9.1.tar.gz", hash = "sha256:f95070a2dfd3147f8ab19f18ee46733310813758593745e07ec18fb08b409f1d"}, ] ipython = [ - {file = "ipython-7.22.0-py3-none-any.whl", hash = "sha256:c0ce02dfaa5f854809ab7413c601c4543846d9da81010258ecdab299b542d199"}, - {file = "ipython-7.22.0.tar.gz", hash = "sha256:9c900332d4c5a6de534b4befeeb7de44ad0cc42e8327fa41b7685abde58cec74"}, + {file = "ipython-8.1.1-py3-none-any.whl", hash = "sha256:6f56bfaeaa3247aa3b9cd3b8cbab3a9c0abf7428392f97b21902d12b2f42a381"}, + {file = "ipython-8.1.1.tar.gz", hash = "sha256:8138762243c9b3a3ffcf70b37151a2a35c23d3a29f9743878c33624f4207be3d"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] ipywidgets = [ - {file = "ipywidgets-7.6.3-py2.py3-none-any.whl", hash = "sha256:e6513cfdaf5878de30f32d57f6dc2474da395a2a2991b94d487406c0ab7f55ca"}, - {file = "ipywidgets-7.6.3.tar.gz", hash = "sha256:9f1a43e620530f9e570e4a493677d25f08310118d315b00e25a18f12913c41f0"}, + {file = "ipywidgets-7.6.5-py2.py3-none-any.whl", hash = "sha256:d258f582f915c62ea91023299603be095de19afb5ee271698f88327b9fe9bf43"}, + {file = "ipywidgets-7.6.5.tar.gz", hash = "sha256:00974f7cb4d5f8d494c19810fedb9fa9b64bffd3cda7c2be23c133a1ad3c99c5"}, ] iso8601 = [ - {file = "iso8601-0.1.14-py2.py3-none-any.whl", hash = 
"sha256:e7e1122f064d626e17d47cd5106bed2c620cb38fe464999e0ddae2b6d2de6004"}, - {file = "iso8601-0.1.14.tar.gz", hash = "sha256:8aafd56fa0290496c5edbb13c311f78fa3a241f0853540da09d9363eae3ebd79"}, + {file = "iso8601-1.0.2-py3-none-any.whl", hash = "sha256:d7bc01b1c2a43b259570bb307f057abc578786ea734ba2b87b836c5efc5bd443"}, + {file = "iso8601-1.0.2.tar.gz", hash = "sha256:27f503220e6845d9db954fb212b95b0362d8b7e6c1b2326a87061c3de93594b1"}, ] jedi = [ - {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, - {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, + {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, + {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, ] jinja2 = [ {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, @@ -1427,8 +1570,8 @@ jmespath = [ {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, ] jsonschema = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, + {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, + {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, ] jupyter = [ {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, @@ -1436,133 +1579,109 @@ jupyter = [ {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, ] jupyter-client = [ - {file = "jupyter_client-6.2.0-py3-none-any.whl", hash = "sha256:9715152067e3f7ea3b56f341c9a0f9715c8c7cc316ee0eb13c3c84f5ca0065f5"}, - {file = "jupyter_client-6.2.0.tar.gz", hash = "sha256:e2ab61d79fbf8b56734a4c2499f19830fbd7f6fefb3e87868ef0545cb3c17eb9"}, + {file = "jupyter_client-7.1.2-py3-none-any.whl", hash = "sha256:d56f1c57bef42ff31e61b1185d3348a5b2bcde7c9a05523ae4dbe5ee0871797c"}, + {file = "jupyter_client-7.1.2.tar.gz", hash = "sha256:4ea61033726c8e579edb55626d8ee2e6bf0a83158ddf3751b8dd46b2c5cd1e96"}, ] jupyter-console = [ - {file = "jupyter_console-6.4.0-py3-none-any.whl", hash = "sha256:7799c4ea951e0e96ba8260575423cb323ea5a03fcf5503560fa3e15748869e27"}, - {file = "jupyter_console-6.4.0.tar.gz", hash = "sha256:242248e1685039cd8bff2c2ecb7ce6c1546eb50ee3b08519729e6e881aec19c7"}, + {file = "jupyter_console-6.4.3-py3-none-any.whl", hash = "sha256:e630bcb682c0088dda45688ad7c2424d4a825c8acf494cb036ced03ed0424841"}, + {file = "jupyter_console-6.4.3.tar.gz", hash = "sha256:55f32626b0be647a85e3217ddcdb22db69efc79e8b403b9771eb9ecc696019b5"}, ] jupyter-core = [ - {file = "jupyter_core-4.7.1-py3-none-any.whl", hash = "sha256:8c6c0cac5c1b563622ad49321d5ec47017bd18b94facb381c6973a0486395f8e"}, - {file = "jupyter_core-4.7.1.tar.gz", hash = "sha256:79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4"}, + {file = "jupyter_core-4.9.2-py3-none-any.whl", hash = "sha256:f875e4d27e202590311d468fa55f90c575f201490bd0c18acabe4e318db4a46d"}, + {file = 
"jupyter_core-4.9.2.tar.gz", hash = "sha256:d69baeb9ffb128b8cd2657fcf2703f89c769d1673c851812119e3a2a0e93ad9a"}, ] jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, {file = "jupyterlab_pygments-0.1.2.tar.gz", hash = "sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146"}, ] jupyterlab-widgets = [ - {file = "jupyterlab_widgets-1.0.0-py3-none-any.whl", hash = "sha256:caeaf3e6103180e654e7d8d2b81b7d645e59e432487c1d35a41d6d3ee56b3fef"}, - {file = "jupyterlab_widgets-1.0.0.tar.gz", hash = "sha256:5c1a29a84d3069208cb506b10609175b249b6486d6b1cbae8fcde2a11584fb78"}, + {file = "jupyterlab_widgets-1.0.2-py3-none-any.whl", hash = "sha256:f5d9efface8ec62941173ba1cffb2edd0ecddc801c11ae2931e30b50492eb8f7"}, + {file = "jupyterlab_widgets-1.0.2.tar.gz", hash = "sha256:7885092b2b96bf189c3a705cc3c412a4472ec5e8382d0b47219a66cccae73cfa"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = 
"MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, + {file = 
"MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, + {file = 
"MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = "sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, + {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, + {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, ] mistune = [ {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, ] nbclient = [ - {file = "nbclient-0.5.3-py3-none-any.whl", hash = "sha256:e79437364a2376892b3f46bedbf9b444e5396cfb1bc366a472c37b48e9551500"}, - {file = "nbclient-0.5.3.tar.gz", hash = "sha256:db17271330c68c8c88d46d72349e24c147bb6f34ec82d8481a8f025c4d26589c"}, + {file = "nbclient-0.5.13-py3-none-any.whl", hash = "sha256:47ac905af59379913c1f8f541098d2550153cf8dc58553cbe18c702b181518b0"}, + {file = "nbclient-0.5.13.tar.gz", hash = "sha256:40c52c9b5e3c31faecaee69f202b3f53e38d7c1c563de0fadde9d7eda0fdafe8"}, ] nbconvert = [ - {file = "nbconvert-6.0.7-py3-none-any.whl", hash = "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d"}, - {file = "nbconvert-6.0.7.tar.gz", hash = "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002"}, + {file = "nbconvert-6.4.4-py3-none-any.whl", hash = "sha256:c0c13d11378e13f72b9cd509c008383dca4051c228e4985f75023b2a5d82fc9f"}, + {file = "nbconvert-6.4.4.tar.gz", hash = "sha256:ee0dfe34bbd1082ac9bfc750aae3c73fcbc34a70c5574c6986ff83c10a3541fd"}, ] nbformat = [ - {file = "nbformat-5.1.3-py3-none-any.whl", hash = "sha256:eb8447edd7127d043361bc17f2f5a807626bc8e878c7709a1c647abda28a9171"}, - {file = "nbformat-5.1.3.tar.gz", hash = "sha256:b516788ad70771c6250977c1374fcca6edebe6126fd2adb5a69aa5c2356fd1c8"}, + {file = "nbformat-5.2.0-py3-none-any.whl", hash = "sha256:3e30424e8291b2188347f5c3ba5273ed3766f12f8c5137c2e456a0815f36e785"}, + {file = "nbformat-5.2.0.tar.gz", hash = "sha256:93df0b9c67221d38fb970c48f6d361819a6c388299a0ef3171bbb912edfe1324"}, ] nest-asyncio = [ - {file = "nest_asyncio-1.5.1-py3-none-any.whl", hash = "sha256:76d6e972265063fe92a90b9cc4fb82616e07d586b346ed9d2c89a4187acea39c"}, - {file = "nest_asyncio-1.5.1.tar.gz", hash = "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa"}, + {file = "nest_asyncio-1.5.4-py3-none-any.whl", hash = "sha256:3fdd0d6061a2bb16f21fe8a9c6a7945be83521d81a0d15cff52e9edee50101d6"}, + {file = "nest_asyncio-1.5.4.tar.gz", hash = "sha256:f969f6013a16fadb4adcf09d11a68a4f617c6049d7af7ac2c676110169a63abd"}, ] notebook = [ - {file = "notebook-6.3.0-py3-none-any.whl", hash = "sha256:cb271af1e8134e3d6fc6d458bdc79c40cbfc84c1eb036a493f216d58f0880e92"}, - {file = "notebook-6.3.0.tar.gz", hash = "sha256:cbc9398d6c81473e9cdb891d2cae9c0d3718fca289dda6d26df5cb660fcadc7d"}, + {file = "notebook-6.4.8-py3-none-any.whl", hash = "sha256:3e702fcc54b8ae597533c3864793b7a1e971dec9e112f67235828d8a798fd654"}, + {file = "notebook-6.4.8.tar.gz", hash = "sha256:1e985c9dc6f678bdfffb9dc657306b5469bfa62d73e03f74e8defbf76d284312"}, ] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", 
hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pandocfilters = [ - {file = "pandocfilters-1.4.3.tar.gz", hash = "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"}, + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, ] parso = [ - {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, - {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, ] pdoc = [ - {file = "pdoc-7.2.0-py3-none-any.whl", hash = "sha256:ebb458952d74041ed0bc8e6d263d0eebcc75fd1e5f1579f964924146147a9868"}, + {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, ] pexpect = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, @@ -1577,179 +1696,228 @@ ply = [ {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, ] prometheus-client = [ - {file = "prometheus_client-0.10.1-py2.py3-none-any.whl", hash = "sha256:030e4f9df5f53db2292eec37c6255957eb76168c6f974e4176c711cf91ed34aa"}, - {file = "prometheus_client-0.10.1.tar.gz", hash = "sha256:b6c5a9643e3545bcbfd9451766cbaa5d9c67e7303c7bc32c750b6fa70ecb107d"}, + {file = "prometheus_client-0.13.1-py3-none-any.whl", hash = "sha256:357a447fd2359b0a1d2e9b311a0c5778c330cfbe186d880ad5a6b39884652316"}, + {file = "prometheus_client-0.13.1.tar.gz", hash = "sha256:ada41b891b79fca5638bd5cfe149efa86512eaa55987893becd2c6d8d0a5dfc5"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.18-py3-none-any.whl", hash = "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04"}, - {file = "prompt_toolkit-3.0.18.tar.gz", hash = "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc"}, + {file = "prompt_toolkit-3.0.28-py3-none-any.whl", hash = "sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c"}, + {file = "prompt_toolkit-3.0.28.tar.gz", hash = "sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650"}, ] ptyprocess = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] py = [ - {file = 
"py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycparser = [ - {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, - {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pygments = [ - {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, - {file = "Pygments-2.8.1.tar.gz", hash = "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94"}, + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, ] pyjwt = [ - {file = "PyJWT-2.1.0-py3-none-any.whl", hash = "sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1"}, - {file = "PyJWT-2.1.0.tar.gz", hash = "sha256:fba44e7898bbca160a2b2b501f492824fc8382485d3a6f11ba5d0c1937ce6130"}, + {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, + {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, ] pyrsistent = [ - {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] pytz = [ - {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, - {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] pywin32 = [ - {file = "pywin32-300-cp35-cp35m-win32.whl", hash = 
"sha256:1c204a81daed2089e55d11eefa4826c05e604d27fe2be40b6bf8db7b6a39da63"}, - {file = "pywin32-300-cp35-cp35m-win_amd64.whl", hash = "sha256:350c5644775736351b77ba68da09a39c760d75d2467ecec37bd3c36a94fbed64"}, - {file = "pywin32-300-cp36-cp36m-win32.whl", hash = "sha256:a3b4c48c852d4107e8a8ec980b76c94ce596ea66d60f7a697582ea9dce7e0db7"}, - {file = "pywin32-300-cp36-cp36m-win_amd64.whl", hash = "sha256:27a30b887afbf05a9cbb05e3ffd43104a9b71ce292f64a635389dbad0ed1cd85"}, - {file = "pywin32-300-cp37-cp37m-win32.whl", hash = "sha256:d7e8c7efc221f10d6400c19c32a031add1c4a58733298c09216f57b4fde110dc"}, - {file = "pywin32-300-cp37-cp37m-win_amd64.whl", hash = "sha256:8151e4d7a19262d6694162d6da85d99a16f8b908949797fd99c83a0bfaf5807d"}, - {file = "pywin32-300-cp38-cp38-win32.whl", hash = "sha256:fbb3b1b0fbd0b4fc2a3d1d81fe0783e30062c1abed1d17c32b7879d55858cfae"}, - {file = "pywin32-300-cp38-cp38-win_amd64.whl", hash = "sha256:60a8fa361091b2eea27f15718f8eb7f9297e8d51b54dbc4f55f3d238093d5190"}, - {file = "pywin32-300-cp39-cp39-win32.whl", hash = "sha256:638b68eea5cfc8def537e43e9554747f8dee786b090e47ead94bfdafdb0f2f50"}, - {file = "pywin32-300-cp39-cp39-win_amd64.whl", hash = "sha256:b1609ce9bd5c411b81f941b246d683d6508992093203d4eb7f278f4ed1085c3f"}, + {file = "pywin32-303-cp310-cp310-win32.whl", hash = "sha256:6fed4af057039f309263fd3285d7b8042d41507343cd5fa781d98fcc5b90e8bb"}, + {file = "pywin32-303-cp310-cp310-win_amd64.whl", hash = "sha256:51cb52c5ec6709f96c3f26e7795b0bf169ee0d8395b2c1d7eb2c029a5008ed51"}, + {file = "pywin32-303-cp311-cp311-win32.whl", hash = "sha256:d9b5d87ca944eb3aa4cd45516203ead4b37ab06b8b777c54aedc35975dec0dee"}, + {file = "pywin32-303-cp311-cp311-win_amd64.whl", hash = "sha256:fcf44032f5b14fcda86028cdf49b6ebdaea091230eb0a757282aa656e4732439"}, + {file = "pywin32-303-cp36-cp36m-win32.whl", hash = "sha256:aad484d52ec58008ca36bd4ad14a71d7dd0a99db1a4ca71072213f63bf49c7d9"}, + {file = "pywin32-303-cp36-cp36m-win_amd64.whl", hash = "sha256:2a09632916b6bb231ba49983fe989f2f625cea237219530e81a69239cd0c4559"}, + {file = "pywin32-303-cp37-cp37m-win32.whl", hash = "sha256:b1675d82bcf6dbc96363fca747bac8bff6f6e4a447a4287ac652aa4b9adc796e"}, + {file = "pywin32-303-cp37-cp37m-win_amd64.whl", hash = "sha256:c268040769b48a13367221fced6d4232ed52f044ffafeda247bd9d2c6bdc29ca"}, + {file = "pywin32-303-cp38-cp38-win32.whl", hash = "sha256:5f9ec054f5a46a0f4dfd72af2ce1372f3d5a6e4052af20b858aa7df2df7d355b"}, + {file = "pywin32-303-cp38-cp38-win_amd64.whl", hash = "sha256:793bf74fce164bcffd9d57bb13c2c15d56e43c9542a7b9687b4fccf8f8a41aba"}, + {file = "pywin32-303-cp39-cp39-win32.whl", hash = "sha256:7d3271c98434617a11921c5ccf74615794d97b079e22ed7773790822735cc352"}, + {file = "pywin32-303-cp39-cp39-win_amd64.whl", hash = "sha256:79cbb862c11b9af19bcb682891c1b91942ec2ff7de8151e2aea2e175899cda34"}, ] pywinpty = [ - {file = "pywinpty-0.5.7-cp27-cp27m-win32.whl", hash = "sha256:b358cb552c0f6baf790de375fab96524a0498c9df83489b8c23f7f08795e966b"}, - {file = "pywinpty-0.5.7-cp27-cp27m-win_amd64.whl", hash = "sha256:1e525a4de05e72016a7af27836d512db67d06a015aeaf2fa0180f8e6a039b3c2"}, - {file = "pywinpty-0.5.7-cp35-cp35m-win32.whl", hash = "sha256:2740eeeb59297593a0d3f762269b01d0285c1b829d6827445fcd348fb47f7e70"}, - {file = "pywinpty-0.5.7-cp35-cp35m-win_amd64.whl", hash = "sha256:33df97f79843b2b8b8bc5c7aaf54adec08cc1bae94ee99dfb1a93c7a67704d95"}, - {file = "pywinpty-0.5.7-cp36-cp36m-win32.whl", hash = "sha256:e854211df55d107f0edfda8a80b39dfc87015bef52a8fe6594eb379240d81df2"}, - {file = 
"pywinpty-0.5.7-cp36-cp36m-win_amd64.whl", hash = "sha256:dbd838de92de1d4ebf0dce9d4d5e4fc38d0b7b1de837947a18b57a882f219139"}, - {file = "pywinpty-0.5.7-cp37-cp37m-win32.whl", hash = "sha256:5fb2c6c6819491b216f78acc2c521b9df21e0f53b9a399d58a5c151a3c4e2a2d"}, - {file = "pywinpty-0.5.7-cp37-cp37m-win_amd64.whl", hash = "sha256:dd22c8efacf600730abe4a46c1388355ce0d4ab75dc79b15d23a7bd87bf05b48"}, - {file = "pywinpty-0.5.7-cp38-cp38-win_amd64.whl", hash = "sha256:8fc5019ff3efb4f13708bd3b5ad327589c1a554cb516d792527361525a7cb78c"}, - {file = "pywinpty-0.5.7.tar.gz", hash = "sha256:2d7e9c881638a72ffdca3f5417dd1563b60f603e1b43e5895674c2a1b01f95a0"}, + {file = "pywinpty-2.0.5-cp310-none-win_amd64.whl", hash = "sha256:f86c76e2881c37e69678cbbf178109f8da1fa8584db24d58e1b9369b0276cfcb"}, + {file = "pywinpty-2.0.5-cp37-none-win_amd64.whl", hash = "sha256:ff9b52f182650cfdf3db1b264a6fe0963eb9d996a7a1fa843ac406c1e32111f8"}, + {file = "pywinpty-2.0.5-cp38-none-win_amd64.whl", hash = "sha256:651ee1467bd7eb6f64d44dbc954b7ab7d15ab6d8adacc4e13299692c67c5d5d2"}, + {file = "pywinpty-2.0.5-cp39-none-win_amd64.whl", hash = "sha256:e59a508ae78374febada3e53b5bbc90b5ad07ae68cbfd72a2e965f9793ae04f3"}, + {file = "pywinpty-2.0.5.tar.gz", hash = "sha256:e125d3f1804d8804952b13e33604ad2ca8b9b2cac92b27b521c005d1604794f8"}, ] pyyaml = [ - {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, - {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, - {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, - {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, - {file 
= "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, - {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, - {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, - {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, - {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = 
"PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] pyzmq = [ - {file = "pyzmq-22.0.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c0cde362075ee8f3d2b0353b283e203c2200243b5a15d5c5c03b78112a17e7d4"}, - {file = "pyzmq-22.0.3-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:ff1ea14075bbddd6f29bf6beb8a46d0db779bcec6b9820909584081ec119f8fd"}, - {file = "pyzmq-22.0.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:26380487eae4034d6c2a3fb8d0f2dff6dd0d9dd711894e8d25aa2d1938950a33"}, - {file = "pyzmq-22.0.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:3e29f9cf85a40d521d048b55c63f59d6c772ac1c4bf51cdfc23b62a62e377c33"}, - {file = "pyzmq-22.0.3-cp36-cp36m-win32.whl", hash = "sha256:4f34a173f813b38b83f058e267e30465ed64b22cd0cf6bad21148d3fa718f9bb"}, - {file = "pyzmq-22.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:30df70f81fe210506aa354d7fd486a39b87d9f7f24c3d3f4f698ec5d96b8c084"}, - {file = "pyzmq-22.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7026f0353977431fc884abd4ac28268894bd1a780ba84bb266d470b0ec26d2ed"}, - {file = "pyzmq-22.0.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6d4163704201fff0f3ab0cd5d7a0ea1514ecfffd3926d62ec7e740a04d2012c7"}, - {file = "pyzmq-22.0.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:763c175294d861869f18eb42901d500eda7d3fa4565f160b3b2fd2678ea0ebab"}, - {file = "pyzmq-22.0.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:61e4bb6cd60caf1abcd796c3f48395e22c5b486eeca6f3a8797975c57d94b03e"}, - {file = "pyzmq-22.0.3-cp37-cp37m-win32.whl", hash = "sha256:b25e5d339550a850f7e919fe8cb4c8eabe4c917613db48dab3df19bfb9a28969"}, - {file = "pyzmq-22.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3ef50d74469b03725d781a2a03c57537d86847ccde587130fe35caafea8f75c6"}, - {file = "pyzmq-22.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60e63577b85055e4cc43892fecd877b86695ee3ef12d5d10a3c5d6e77a7cc1a3"}, - {file = "pyzmq-22.0.3-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:f5831eff6b125992ec65d973f5151c48003b6754030094723ac4c6e80a97c8c4"}, - {file = "pyzmq-22.0.3-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:9221783dacb419604d5345d0e097bddef4459a9a95322de6c306bf1d9896559f"}, - {file = "pyzmq-22.0.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b62ea18c0458a65ccd5be90f276f7a5a3f26a6dea0066d948ce2fa896051420f"}, - {file = "pyzmq-22.0.3-cp38-cp38-win32.whl", hash = "sha256:81e7df0da456206201e226491aa1fc449da85328bf33bbeec2c03bb3a9f18324"}, - {file = "pyzmq-22.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:f52070871a0fd90a99130babf21f8af192304ec1e995bec2a9533efc21ea4452"}, - {file = "pyzmq-22.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:c5e29fe4678f97ce429f076a2a049a3d0b2660ada8f2c621e5dc9939426056dd"}, - {file = "pyzmq-22.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d18ddc6741b51f3985978f2fda57ddcdae359662d7a6b395bc8ff2292fca14bd"}, - {file = "pyzmq-22.0.3-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4231943514812dfb74f44eadcf85e8dd8cf302b4d0bce450ce1357cac88dbfdc"}, - {file = "pyzmq-22.0.3-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:23a74de4b43c05c3044aeba0d1f3970def8f916151a712a3ac1e5cd9c0bc2902"}, - {file = "pyzmq-22.0.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:532af3e6dddea62d9c49062ece5add998c9823c2419da943cf95589f56737de0"}, - {file = "pyzmq-22.0.3-cp39-cp39-win32.whl", hash = "sha256:33acd2b9790818b9d00526135acf12790649d8d34b2b04d64558b469c9d86820"}, - {file = "pyzmq-22.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:a558c5bc89d56d7253187dccc4e81b5bb0eac5ae9511eb4951910a1245d04622"}, - {file = "pyzmq-22.0.3-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:581787c62eaa0e0db6c5413cedc393ebbadac6ddfd22e1cf9a60da23c4f1a4b2"}, - {file = "pyzmq-22.0.3-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = 
"sha256:38e3dca75d81bec4f2defa14b0a65b74545812bb519a8e89c8df96bbf4639356"}, - {file = "pyzmq-22.0.3-pp36-pypy36_pp73-win32.whl", hash = "sha256:2f971431aaebe0a8b54ac018e041c2f0b949a43745444e4dadcc80d0f0ef8457"}, - {file = "pyzmq-22.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:da7d4d4c778c86b60949d17531e60c54ed3726878de8a7f8a6d6e7f8cc8c3205"}, - {file = "pyzmq-22.0.3-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:13465c1ff969cab328bc92f7015ce3843f6e35f8871ad79d236e4fbc85dbe4cb"}, - {file = "pyzmq-22.0.3-pp37-pypy37_pp73-win32.whl", hash = "sha256:279cc9b51db48bec2db146f38e336049ac5a59e5f12fb3a8ad864e238c1c62e3"}, - {file = "pyzmq-22.0.3.tar.gz", hash = "sha256:f7f63ce127980d40f3e6a5fdb87abf17ce1a7c2bd8bf2c7560e1bbce8ab1f92d"}, + {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:6b217b8f9dfb6628f74b94bdaf9f7408708cb02167d644edca33f38746ca12dd"}, + {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2841997a0d85b998cbafecb4183caf51fd19c4357075dfd33eb7efea57e4c149"}, + {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"}, + {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"}, + {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"}, + {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"}, + {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"}, + {file = "pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"}, + {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"}, + {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"}, + {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"}, + {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = "sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"}, + {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"}, + {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"}, + {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"}, + {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"}, + {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"}, + {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"}, + {file = "pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"}, + {file = "pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"}, + {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"}, + {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"}, + {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"}, + {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"}, + {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"}, + {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"}, + {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f43b4a2e6218371dd4f41e547bd919ceeb6ebf4abf31a7a0669cd11cd91ea973"}, + {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"}, + {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"}, + {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"}, + {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"}, + {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:80e043a89c6cadefd3a0712f8a1322038e819ebe9dbac7eca3bce1721bcb63bf"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1621e7a2af72cced1f6ec8ca8ca91d0f76ac236ab2e8828ac8fe909512d566cb"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d6157793719de168b199194f6b6173f0ccd3bf3499e6870fac17086072e39115"}, + {file = "pyzmq-22.3.0.tar.gz", hash = "sha256:8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c"}, ] qtconsole = [ - {file = "qtconsole-5.0.3-py3-none-any.whl", hash = "sha256:4a38053993ca2da058f76f8d75b3d8906efbf9183de516f92f222ac8e37d9614"}, - {file = "qtconsole-5.0.3.tar.gz", hash = "sha256:c091a35607d2a2432e004c4a112d241ce908086570cf68594176dd52ccaa212d"}, + {file = "qtconsole-5.2.2-py3-none-any.whl", hash = "sha256:4aa6a3e600e0c8cf16853f2378311bc2371f57cb0f22ecfc28994f4cf409ee2e"}, + {file = "qtconsole-5.2.2.tar.gz", hash = "sha256:8f9db97b27782184efd0a0f2d57ea3bd852d053747a2e442a9011329c082976d"}, ] qtpy = [ - {file = "QtPy-1.9.0-py2.py3-none-any.whl", hash = "sha256:fa0b8363b363e89b2a6f49eddc162a04c0699ae95e109a6be3bb145a913190ea"}, - {file = "QtPy-1.9.0.tar.gz", hash = "sha256:2db72c44b55d0fe1407be8fba35c838ad0d6d3bb81f23007886dc1fc0f459c8d"}, + {file = "QtPy-2.0.1-py3-none-any.whl", hash = "sha256:d93f2c98e97387fcc9d623d509772af5b6c15ab9d8f9f4c5dfbad9a73ad34812"}, + {file = "QtPy-2.0.1.tar.gz", hash = "sha256:adfd073ffbd2de81dc7aaa0b983499ef5c59c96adcfdcc9dea60d42ca885eb8f"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] s3transfer = [ - {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, - {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, + {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, + {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, ] sammy = [ {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, ] send2trash = [ - {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, - {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, + {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, + {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, ] six = [ - {file = "six-1.15.0-py2.py3-none-any.whl", hash = 
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, - {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +soupsieve = [ + {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, + {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, +] +stack-data = [ + {file = "stack_data-0.2.0-py3-none-any.whl", hash = "sha256:999762f9c3132308789affa03e9271bbbe947bf78311851f4d485d8402ed858e"}, + {file = "stack_data-0.2.0.tar.gz", hash = "sha256:45692d41bd633a9503a5195552df22b583caf16f0b27c4e58c98d88c8b648e12"}, ] stripe = [ - {file = "stripe-2.61.0-py2.py3-none-any.whl", hash = "sha256:5c6016362031a585d4f1138aff6e99c4c6d70b22b152b91fb6202d36bd8ac8c2"}, - {file = "stripe-2.61.0.tar.gz", hash = "sha256:8131addd3512a22c4c539dda2d869a8f488e06f1b02d1f3a5f0f4848fc56184e"}, + {file = "stripe-2.67.0-py2.py3-none-any.whl", hash = "sha256:7efaee187a0615ce9361b6009122f89488a874c3175b5262dd9741a49088db18"}, + {file = "stripe-2.67.0.tar.gz", hash = "sha256:718b93d6a18105243bce0c9b48e518f72f1f083d27f07e07b8bdb3a541fe3835"}, ] swaggyp = [ - {file = "swaggyp-0.1.0-py2.py3-none-any.whl", hash = "sha256:1411a7688d613513874febbd754504e0a45e48c1a0985b34b0653d285dbbc43d"}, + {file = "swaggyp-0.2.0-py3-none-any.whl", hash = "sha256:030dca0f0a24469abcf7e1f047eaa01c84c206a8d891cfb1a4a24f40e2a6a146"}, + {file = "swaggyp-0.2.0.tar.gz", hash = "sha256:b39ab7f8a49fdb17af7862c5ac81b872a5f7f95c3b0989a6a285fde3f4ee4885"}, ] terminado = [ - {file = "terminado-0.9.4-py3-none-any.whl", hash = "sha256:daed77f9fad7b32558fa84b226a76f45a02242c20813502f36c4e1ade6d8f1ad"}, - {file = "terminado-0.9.4.tar.gz", hash = "sha256:9a7dbcfbc2778830eeb70261bf7aa9d98a3eac8631a3afe3febeb57c12f798be"}, + {file = "terminado-0.13.3-py3-none-any.whl", hash = "sha256:874d4ea3183536c1782d13c7c91342ef0cf4e5ee1d53633029cbc972c8760bd8"}, + {file = "terminado-0.13.3.tar.gz", hash = "sha256:94d1cfab63525993f7d5c9b469a50a18d0cdf39435b59785715539dd41e36c0d"}, ] testpath = [ - {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, - {file = "testpath-0.4.4.tar.gz", hash = "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e"}, + {file = "testpath-0.6.0-py3-none-any.whl", hash = "sha256:8ada9f80a2ac6fb0391aa7cdb1a7d11cfa8429f693eda83f74dde570fe6fa639"}, + {file = "testpath-0.6.0.tar.gz", hash = "sha256:2f1b97e6442c02681ebe01bd84f531028a7caea1af3825000f52345c30285e0f"}, ] tornado = [ {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, @@ -1795,17 +1963,12 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] traitlets = [ - {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, - {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, -] -typing-extensions = [ - {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = 
"sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, - {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, - {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, + {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, + {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, ] urllib3 = [ - {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, - {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, + {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, + {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, ] valley = [ {file = "valley-1.5.6-py3-none-any.whl", hash = "sha256:fa2e5fc51d59901e5eb178116a4fb15b712928b4c87809f59cdf02a934d63cf6"}, @@ -1820,14 +1983,14 @@ webencodings = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] werkzeug = [ - {file = "Werkzeug-2.0.1-py3-none-any.whl", hash = "sha256:6c1ec500dcdba0baa27600f6a22f6333d8b662d22027ff9f6202e3367413caa8"}, - {file = "Werkzeug-2.0.1.tar.gz", hash = "sha256:1de1db30d010ff1af14a009224ec49ab2329ad2cde454c8a708130642d579c42"}, + {file = "Werkzeug-2.0.3-py3-none-any.whl", hash = "sha256:1421ebfc7648a39a5c58c601b154165d05cf47a3cd0ccb70857cbdacf6c8f2b8"}, + {file = "Werkzeug-2.0.3.tar.gz", hash = "sha256:b863f8ff057c522164b6067c9e28b041161b4be5ba4d0daceeaa50a163822d3c"}, ] widgetsnbextension = [ - {file = "widgetsnbextension-3.5.1-py2.py3-none-any.whl", hash = "sha256:bd314f8ceb488571a5ffea6cc5b9fc6cba0adaf88a9d2386b93a489751938bcd"}, - {file = "widgetsnbextension-3.5.1.tar.gz", hash = "sha256:079f87d87270bce047512400efd70238820751a11d2d8cb137a5a5bdbaf255c7"}, + {file = "widgetsnbextension-3.5.2-py2.py3-none-any.whl", hash = "sha256:763a9fdc836d141fa080005a886d63f66f73d56dba1fb5961afc239c77708569"}, + {file = "widgetsnbextension-3.5.2.tar.gz", hash = "sha256:e0731a60ba540cd19bbbefe771a9076dcd2dde90713a8f87f27f53f2d1db7727"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, + {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, ] diff --git a/pyproject.toml b/pyproject.toml index 87a92fc..90b920c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = ["Brian Jinwright"] license = "Apache-2.0" [tool.poetry.dependencies] -python = "^3.7" +python = "^3.8" faunadb = "^4.0.1" valley = "^1.5.6" jinja2 = "3.0.1" @@ -25,7 +25,7 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = {path = ".ignore/dist/swaggyp-0.1.0-py2.py3-none-any.whl"} +swaggyp = "^0.2.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From f2fe931b313fe1e37d1fc40b584c1ee41bc88117 Mon Sep 
17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 28 Mar 2022 09:42:03 +0800 Subject: [PATCH 024/214] removed comment blocks in unit tests --- pfunk/tests/test_project.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index dfe0290..d96985f 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -8,24 +8,24 @@ class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) From f88dd7c1964cb57a51e9553835c163e36ef5f9ff Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 29 Mar 2022 11:40:59 +0800 Subject: [PATCH 025/214] Added more docstrings for swagger generation future --- pfunk/utils/swagger.py | 43 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2eb01f5..9711904 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -53,6 +53,9 @@ def __init__(self, collections, rules=[]): Description (str): Docstring of the view Path: Endpoint (str): Path of the function. You can see it in `url` method of a view. + Model: + Name (str): The class name of the `collection` + Properties (str): The fields of the collection and their type Returns: Generated YAML file @@ -75,6 +78,11 @@ def _convert_url_to_swagger(self, replacement: str, to_replace: str) -> str: return re.sub('<\w+:\w+>', f'{{{replacement}}}', to_replace) def write_to_yaml(self): + """ Using the class' variables, write it to a swagger (yaml) file + + It will create `swagger.yaml` file in current directory, if + there is already one, it will print the yaml file instead. 
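
As a rough usage sketch of the generator documented in this patch (not part of the diff itself): `Person` and its module are hypothetical, and a `pfunk.json` config file is assumed to sit in the working directory.

    from pfunk.utils.swagger import SwaggerDoc
    from myapp.collections import Person  # hypothetical collection class

    # Gather paths and model definitions from the collection, then emit the YAML doc
    doc = SwaggerDoc(collections=[Person])
    doc.generate_swagger()  # at this point in the series, writes swagger.yaml next to pfunk.json
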
+ """ if not os.path.exists(f'pfunk.json'): raise Exception('Missing JSON Config file.') else: @@ -108,6 +116,22 @@ def write_to_yaml(self): return t.to_yaml() def get_operations(self, col: Collection): + """ Acquires all of the endpoint in the collections and make it + as an `operation` for swagger doc + + Appends all of the acquired paths here in `self.paths` + array class variable + + Args: + col (`pfunk.collection.Collection`, required): + The collection that has views + + Returns: + paths ([`swaggyp.Path`], required): + An array of `Path` that can be consumed using + `swaggyp.SwaggerTemplate` to show + available paths + """ for view in col.collection_views: route = view.url(col) rule = route.rule @@ -171,6 +195,24 @@ def get_operations(self, col: Collection): return self.paths def get_model_definitions(self, col: Collection): + """ Acquires collection's name, fields, and relationships to + convert it to a swagger `Definition` + + Converts `ReferenceField` and `ManyToManyField` to + reference other definitions as a characterization + of relationships defined on models + + Args: + col (`pfunk.collection.Collection`, required): + The collection that has views + + Returns: + definitions ([`swaggyp.Definition`], required): + An array of `Definition` that can be consumed using + `swaggyp.SwaggerTemplate` to show + available models + + """ # Define model definitions by iterating through collection's fields for its properties col_properties = {} for property, field_type in col._base_properties.items(): @@ -192,6 +234,7 @@ def get_model_definitions(self, col: Collection): return self.definitions def generate_swagger(self): + """ One-function-to-call needed function to generate a swagger documentation """ for i in self.collections: col = i() self.get_operations(col) From 3f7c64b322d32b4e5fa54ee9dcc5f41ccdc394ad Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:08:35 +0800 Subject: [PATCH 026/214] Added aws.py as a skeleton for upcoming AWS features --- pfunk/utils/aws.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 pfunk/utils/aws.py diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py new file mode 100644 index 0000000..7413120 --- /dev/null +++ b/pfunk/utils/aws.py @@ -0,0 +1,36 @@ +import boto3 +import swaggyp as sw + +class ApiGateway(object): + + def __init__(self): + self.client = boto3.client('apigateway') + pass + + def create_api_from_yaml(self, yaml_file): + # response = client.import_rest_api( + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def update_api_from_yaml(self, yaml_file): + # response = client.put_rest_api( + # restApiId='string', + # mode='merge'|'overwrite', + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using """ + pass + + def \ No newline at end of file From 25e01580a56a3f39548696ef1a5424bd6089f63b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:11:41 +0800 Subject: [PATCH 027/214] Added skeleton tests for aws features --- pfunk/tests/test_aws.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 pfunk/tests/test_aws.py diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py new file mode 100644 index 0000000..c3cdc45 --- /dev/null +++ b/pfunk/tests/test_aws.py @@ -0,0 +1,20 @@ +import unittest + +from 
pfunk.utils.aws import ApiGateway +from pfunk.tests import User, Group +from pfunk.project import Project + + +class ApiGatewayTests(unittest.TestCase): + + def setUp(self) -> None: + self.project = Project() + + def test_validate_yaml(self): + pass + + def test_create_api_from_yaml(self): + pass + + def test_update_api_from_yaml(self): + pass From 5624739a81276becbefb960a97e9627efbf5449a Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 7 Apr 2022 15:29:22 +0800 Subject: [PATCH 028/214] Added yaml validator and have it return specific errors --- pfunk/tests/test_aws.py | 34 ++++++++++++++++++++++----- pfunk/utils/aws.py | 21 ++++++++++++----- pfunk/utils/swagger.py | 1 + poetry.lock | 52 ++++++++++++++++++++++++++++++++++++----- pyproject.toml | 1 + 5 files changed, 91 insertions(+), 18 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index c3cdc45..a6b1314 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,20 +1,42 @@ import unittest +from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group +from pfunk.tests import User, Group, Person, Sport from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - def setUp(self) -> None: - self.project = Project() + @classmethod + def setUpCls(cls) -> None: + cls.project = Project() + cls.aws_client = ApiGateway() + cls.project.add_resources([Person, Sport, Group, User]) + cls.api_yaml = cls.project.generate_swagger() def test_validate_yaml(self): - pass + result = self.aws_client.validate_yaml(self.api_yaml) + self.assertIsNone(result) # if there are no errors, then spec is valid + def test_validate_wrong_yaml(self): + result = self.aws_client.validate_yaml('wrong yaml...33::39') + self.assertIsNotNone(result) # if there are returned objs, there is an error + + @mock.patch('boto3.client') def test_create_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + @mock.patch('boto3.client') def test_update_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + + def test_create_api_from_wrong_yaml(self): + result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. YAML is not valid.') + + def test_update_api_from_wrong_yaml(self): + result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. 
YAML is not valid.') \ No newline at end of file diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7413120..4c61506 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,5 +1,9 @@ import boto3 import swaggyp as sw +from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator +from openapi_spec_validator.readers import read_from_filename +from openapi_spec_validator.exceptions import OpenAPIValidationError + class ApiGateway(object): @@ -7,6 +11,17 @@ def __init__(self): self.client = boto3.client('apigateway') pass + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using OpenAPI Spec v2""" + try: + spec_dict, spec_url = read_from_filename(yaml_file) + validate_v2_spec(spec_dict) + except OpenAPIValidationError as err: + errors = [{err.message: err.json_path} + for err in openapi_v2_spec_validator.iter_errors(spec_dict)] + return errors + return None + def create_api_from_yaml(self, yaml_file): # response = client.import_rest_api( # failOnWarnings=True|False, @@ -28,9 +43,3 @@ def update_api_from_yaml(self, yaml_file): # body=b'bytes'|file # ) pass - - def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using """ - pass - - def \ No newline at end of file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 9711904..175d0ea 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -110,6 +110,7 @@ def write_to_yaml(self): if not os.path.exists(f'swagger.yaml'): with open(f'swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) + return t.to_yaml() else: print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') print(t.to_yaml()) diff --git a/poetry.lock b/poetry.lock index 67a8d8a..83b3b94 100644 --- a/poetry.lock +++ b/poetry.lock @@ -66,7 +66,7 @@ six = ">=1.6.1,<2.0" name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -374,7 +374,7 @@ python-versions = ">=3.5" name = "importlib-resources" version = "5.6.0" description = "Read resources from Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -519,7 +519,7 @@ python-versions = ">=3.7" name = "jsonschema" version = "4.4.0" description = "An implementation of JSON Schema validation for Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -750,6 +750,38 @@ docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "m json-logging = ["json-logging"] test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + [[package]] name = 
"packaging" version = "21.3" @@ -930,7 +962,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyrsistent" version = "0.18.1" description = "Persistent/Functional/Immutable data structures" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1250,7 +1282,7 @@ notebook = ">=4.4.1" name = "zipp" version = "3.7.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1261,7 +1293,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "45c80cdba27ede0a7a28b611355294d4166ccfd7d4209b7fb6b75582d854b5a7" +content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" [metadata.files] appnope = [ @@ -1678,6 +1710,14 @@ notebook = [ {file = "notebook-6.4.10-py3-none-any.whl", hash = "sha256:49cead814bff0945fcb2ee07579259418672ac175d3dc3d8102a4b0a656ed4df"}, {file = "notebook-6.4.10.tar.gz", hash = "sha256:2408a76bc6289283a8eecfca67e298ec83c67db51a4c2e1b713dd180bb39e90e"}, ] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, diff --git a/pyproject.toml b/pyproject.toml index 25bd35e..46062d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" swaggyp = "^0.2.0" +openapi-spec-validator = "^0.4.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From ec8afcb03258cfb91196a9a54b305702183c5b09 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 029/214] Made yaml validation to properly separate openapi errors and python errors. 
Refactored swagger.py to return the swagger file directory --- pfunk/tests/test_aws.py | 46 +++++++++++++++---------- pfunk/utils/aws.py | 74 ++++++++++++++++++++++++++++++++++------- pfunk/utils/swagger.py | 36 ++++++++++++++------ 3 files changed, 117 insertions(+), 39 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index a6b1314..6ec5841 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,6 @@ +import os import unittest +import tempfile from unittest import mock from pfunk.utils.aws import ApiGateway @@ -9,34 +11,44 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpCls(cls) -> None: + def setUpClass(cls) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) - cls.api_yaml = cls.project.generate_swagger() + + swagger = cls.project.generate_swagger() + cls.swagger_dir = swagger['dir'] + cls.swagger_file = swagger['swagger_file'] + print(cls.swagger_dir) def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.api_yaml) + result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid def test_validate_wrong_yaml(self): result = self.aws_client.validate_yaml('wrong yaml...33::39') - self.assertIsNotNone(result) # if there are returned objs, there is an error + # if there are returned objs, there is an error + self.assertIsNotNone(result) @mock.patch('boto3.client') - def test_create_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() + def test_create_api_from_yaml(self, mocked): + result = self.aws_client.create_api_from_yaml( + yaml_file=self.swagger_dir) self.assertTrue(result['success']) @mock.patch('boto3.client') - def test_update_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() - self.assertTrue(result['success']) - - def test_create_api_from_wrong_yaml(self): - result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') - - def test_update_api_from_wrong_yaml(self): - result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') \ No newline at end of file + def test_create_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.create_api_from_yaml(tmp.name) + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + + # @mock.patch('boto3.client') + # def test_update_api_from_yaml(self): + # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) + # self.assertTrue(result['success']) + + # def test_update_api_from_wrong_yaml(self): + # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + # self.assertEqual(result, 'Bad Request. 
YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 4c61506..b1c26c0 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,36 +1,86 @@ import boto3 import swaggyp as sw +# from botocore.exceptions import BadReq +from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError class ApiGateway(object): + region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway') - pass + self.client = boto3.client('apigateway', region_name=self.region_name) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" try: spec_dict, spec_url = read_from_filename(yaml_file) validate_v2_spec(spec_dict) + except (OSError, AttributeError) as err: + return {'errors': str(err)} except OpenAPIValidationError as err: + return self._iterate_validator_errors(spec_dict) + return None + + def _iterate_validator_errors(self, spec_dict): + """ Iterates through list of errors that the `openapi_spec_validator` returned + + This method was implemented due to `openapi_spec_validator` design + that if an error happened while iterating through the YAML file + it returns a Python error. + + Args: + spec_dict (dict, required): + `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` + Returns: + list of errors + """ + try: errors = [{err.message: err.json_path} for err in openapi_v2_spec_validator.iter_errors(spec_dict)] return errors - return None + except (OSError, AttributeError) as err: + return str(err) - def create_api_from_yaml(self, yaml_file): - # response = client.import_rest_api( - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): + """ Creates an API for AWS API Gateway from a YAML swagger file + + Args: + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=file + ) + else: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file + ) + + if response: + return { + 'success': True, + response: response + } + # TODO: Specify boto exceptions + except Exception as err: + return err def update_api_from_yaml(self, yaml_file): # response = client.put_rest_api( diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 175d0ea..086ade7 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -77,11 +77,20 @@ def __init__(self, collections, rules=[]): def _convert_url_to_swagger(self, replacement: str, to_replace: str) -> str: return re.sub('<\w+:\w+>', f'{{{replacement}}}', to_replace) - def write_to_yaml(self): + def write_to_yaml(self, dir=''): """ Using the class' variables, write it to a swagger (yaml) file It will create `swagger.yaml` file in current directory, if there is already one, it will print the yaml file instead. 
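
For orientation, the validator as changed above behaves roughly as follows; the file names are placeholders, and constructing `ApiGateway` only assumes boto3 can build an API Gateway client.

    from pfunk.utils.aws import ApiGateway

    gateway = ApiGateway()
    assert gateway.validate_yaml('swagger.yaml') is None   # a valid OpenAPI v2 spec returns None
    problems = gateway.validate_yaml('broken.yaml')
    # unreadable input -> {'errors': '<python error>'}; spec violations -> a list of {message: json_path} dicts
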
+ + Args: + dir (str, optional): + custom directory of the swagger file. If there are no provided, create one in current dir. + Returns: + dir (str, required): + directory of the created swagger file + swagger_file (str, required): + the contents of the swagger yaml file """ if not os.path.exists(f'pfunk.json'): raise Exception('Missing JSON Config file.') @@ -107,14 +116,16 @@ def write_to_yaml(self): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'swagger.yaml'): - with open(f'swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}/swagger.yaml'): + with open(f'{dir}/swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') - print(t.to_yaml()) - return t.to_yaml() + print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + # print(t.to_yaml()) + return { + "dir": f'{dir}/swagger.yaml', + "swagger_file": t.to_yaml() + } def get_operations(self, col: Collection): """ Acquires all of the endpoint in the collections and make it @@ -234,10 +245,15 @@ def get_model_definitions(self, col: Collection): self.definitions.append(model) return self.definitions - def generate_swagger(self): - """ One-function-to-call needed function to generate a swagger documentation """ + def generate_swagger(self, dir=''): + """ One-function-to-call needed function to generate a swagger documentation + + Args: + dir (str, optional): + directory to create the yaml file + """ for i in self.collections: col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml() \ No newline at end of file + return self.write_to_yaml(dir) \ No newline at end of file From 18bcf5b1ab0d92a5d6177eee655fbc794a6373fd Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:55:52 +0800 Subject: [PATCH 030/214] Refactored default dir of swagger file. Refactored unit tests for AWS utils --- pfunk/tests/test_aws.py | 1 - pfunk/utils/swagger.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 6ec5841..571f3c1 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -19,7 +19,6 @@ def setUpClass(cls) -> None: swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] - print(cls.swagger_dir) def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 086ade7..edc9bfd 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -116,14 +116,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}/swagger.yaml'): - with open(f'{dir}/swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}swagger.yaml'): + with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}/swagger.yaml', + "dir": f'{dir}swagger.yaml', "swagger_file": t.to_yaml() } From 8bb1cd3cb37fb56deed2ab07fd11a12f97f86b9b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 031/214] Finished create/update api from yaml. Added writing to config file if API is created. 
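
The new return shape is what the test setup above consumes; a condensed sketch (imports mirror the tests, and note that the 'dir' key holds the path of the generated swagger.yaml):

    from pfunk.project import Project
    from pfunk.tests import Person, Sport, Group, User

    project = Project()
    project.add_resources([Person, Sport, Group, User])

    swagger = project.generate_swagger()
    swagger_path = swagger['dir']            # path to the written swagger.yaml
    yaml_text = swagger['swagger_file']      # the rendered YAML document
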
Added reading from config file if API is to be updated --- pfunk/tests/test_aws.py | 23 +++++--- pfunk/utils/aws.py | 128 ++++++++++++++++++++++++++++++++-------- 2 files changed, 119 insertions(+), 32 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 571f3c1..2789dc2 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -11,7 +11,8 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpClass(cls) -> None: + @mock.patch('boto3.client') + def setUpClass(cls, mocked) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) @@ -43,11 +44,17 @@ def test_create_api_from_wrong_yaml(self, mocked): result = self.aws_client.create_api_from_yaml(tmp.name) self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') - # @mock.patch('boto3.client') - # def test_update_api_from_yaml(self): - # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) - # self.assertTrue(result['success']) + @mock.patch('boto3.client') + def test_update_api_from_yaml(self, mocked): + result = self.aws_client.update_api_from_yaml( + yaml_file=self.swagger_dir, mode='merge') + print(result) + self.assertTrue(result['success']) - # def test_update_api_from_wrong_yaml(self): - # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - # self.assertEqual(result, 'Bad Request. YAML is not valid.') + @mock.patch('boto3.client') + def test_update_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index b1c26c0..7427db3 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,4 +1,6 @@ +import datetime import boto3 +import json import swaggyp as sw # from botocore.exceptions import BadReq from envs import env @@ -6,12 +8,54 @@ from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError +AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') +AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') + + +def _json_dt_helper(o): + """ Helps serializing `datetime` objects to a readable string """ + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + + +def write_to_config(obj, config_file_dir='pfunk.json'): + """ Writes to pfunk config file + + Args: + obj (dict, required): + key, value pairs to write to json file + config_file_dir (str, optional): + directory of the config json file, default='pfunk.json' + Returns: + config_file (dict, required): + the current value of config file (pfunk.json) + """ + with open(config_file_dir, 'r+') as f: + data = json.load(f) + data.update(obj) + f.seek(0) + f.truncate() + json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) + return data + + +def read_from_config_file(config_file_dir='pfunk.json'): + """ Returns data from config file in dict form """ + with open(config_file_dir, 'r') as f: + data = json.load(f) + return data + class ApiGateway(object): region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway', region_name=self.region_name) + self.client = boto3.client( + 'apigateway', + aws_access_key_id=AWS_ACCESS_KEY, + 
aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name=AWS_DEFAULT_REGION) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" @@ -63,33 +107,69 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): try: if not type(yaml_file) == 'string': with open(yaml_file, 'r') as file: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=file - ) - else: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + yaml_file = file.read() + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file) + + # TODO: Fix -- if using mocked obj, don't write anything + if response: + write_to_config({'api': response}) + return { + 'success': True, + 'response': response + } + # TODO: Specify boto exceptions + except Exception as err: + return { + 'error': str(err) + } + + def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): + """ Updates rest API using yaml file + + Args: + rest_api_id (string, required): + ID of the API for updating, if not provided, use API ID from `pfunk.json` + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + mode (string, required): + Mode of update, choice=['merge', 'overwrite'] + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + yaml_file = file.read() + # Acquire REST API ID from config file if not provided + if not rest_api_id: + data = read_from_config_file() + if data.get('api'): + rest_api_id = (data.get('api') + .get('id')) + + response = self.client.put_rest_api( + restApiId=rest_api_id, + mode=mode, + failOnWarnings=fail_on_warnings, + body=yaml_file + ) if response: return { 'success': True, - response: response + 'response': response } # TODO: Specify boto exceptions except Exception as err: - return err - - def update_api_from_yaml(self, yaml_file): - # response = client.put_rest_api( - # restApiId='string', - # mode='merge'|'overwrite', - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + return { + 'error': str(err) + } From 7107bbfc1bc0ac8322b36a6ea6ea9c7345b299d4 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 13 Apr 2022 13:44:30 +0800 Subject: [PATCH 032/214] Reworked what exceptions does aws util react to. Added doc for aws util tests --- pfunk/tests/test_aws.py | 10 +++++++++- pfunk/utils/aws.py | 15 ++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 2789dc2..d28c852 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -9,6 +9,15 @@ class ApiGatewayTests(unittest.TestCase): + """ Unit tests for creation of API from Swagger file + + Note that the unittests uses mocked boto3 normally. If + you want to test against a real endpoint, remove the + patch decorator at `setUpClass` and the `mocked` + param. Also make sure you have the required + env vars for AWS credentials and you have + the json config in the current env. 
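
Assuming real AWS credentials are exported (AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY, AWS_DEFAULT_REGION) and a pfunk.json config exists, the intended non-mocked flow introduced in this patch is roughly:

    from pfunk.utils.aws import ApiGateway

    gateway = ApiGateway()

    # Import a new REST API; on success the Gateway response (including its id) is written to pfunk.json
    created = gateway.create_api_from_yaml('swagger.yaml')

    # Subsequent updates may omit rest_api_id; it is read back from the 'api' entry in pfunk.json
    updated = gateway.update_api_from_yaml('swagger.yaml', mode='merge')
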
+ """ @classmethod @mock.patch('boto3.client') @@ -48,7 +57,6 @@ def test_create_api_from_wrong_yaml(self, mocked): def test_update_api_from_yaml(self, mocked): result = self.aws_client.update_api_from_yaml( yaml_file=self.swagger_dir, mode='merge') - print(result) self.assertTrue(result['success']) @mock.patch('boto3.client') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7427db3..13164f8 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -2,7 +2,7 @@ import boto3 import json import swaggyp as sw -# from botocore.exceptions import BadReq +from botocore.exceptions import ClientError, NoCredentialsError from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename @@ -20,7 +20,7 @@ def _json_dt_helper(o): def write_to_config(obj, config_file_dir='pfunk.json'): - """ Writes to pfunk config file + """ Appends object to pfunk config file Args: obj (dict, required): @@ -111,16 +111,14 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - - # TODO: Fix -- if using mocked obj, don't write anything + if response: write_to_config({'api': response}) return { 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } @@ -154,7 +152,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin data = read_from_config_file() if data.get('api'): rest_api_id = (data.get('api') - .get('id')) + .get('id')) response = self.client.put_rest_api( restApiId=rest_api_id, @@ -168,8 +166,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } From a4736c40b0e5fa8198ddbdf680e4146bd61f4ed0 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 27 Apr 2022 14:41:51 +0800 Subject: [PATCH 033/214] Did cleaning up of swaggerdoc docstrings --- pfunk/utils/swagger.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index edc9bfd..2c5f02f 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -42,6 +42,7 @@ def __init__(self, collections, rules=[]): """ Generates swagger doc. 
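
Before the docstring cleanup below, a minimal round trip through the config helpers added in the previous patch; it assumes pfunk.json already exists, and the 'api' payload shown is illustrative.

    from pfunk.utils.aws import write_to_config, read_from_config_file

    write_to_config({'api': {'id': 'abc123'}})     # merged into pfunk.json, preserving existing keys
    api_id = read_from_config_file()['api']['id']
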
Details are going to be acquired from the collections The acquisition of the information needed for docs are as follows: + ``` Response: Description (str): View's `get_query` docstrings Status Code (int): @@ -56,9 +57,16 @@ def __init__(self, collections, rules=[]): Model: Name (str): The class name of the `collection` Properties (str): The fields of the collection and their type - + ``` + + Args: + collections ([`pfunk.collection.Collection`]): + array of collection of the project to generate models from + rules ([`werkzeug.routing.Rule`]): + array of additional URLs that the given collection doesn't have Returns: - Generated YAML file + swagger.yaml (yaml, required): + Generated YAML file """ self.collections = collections self.rules = rules From d9ed6ca5cdf715a2171c0504d2684b60f2102a7c Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 4 May 2022 14:52:41 +0800 Subject: [PATCH 034/214] added generate-swagger as a cli command, creates a swagger file from the provided json config file of a project --- pfunk/cli.py | 46 ++++++++++++++++++++++++++++++++++-------- pfunk/project.py | 18 ++++++++++++++--- pfunk/utils/swagger.py | 19 ++++++++++++----- 3 files changed, 67 insertions(+), 16 deletions(-) diff --git a/pfunk/cli.py b/pfunk/cli.py index 0b234c8..457c065 100644 --- a/pfunk/cli.py +++ b/pfunk/cli.py @@ -25,6 +25,7 @@ def load_config_file(filename): config = json.load(f) return config + @pfunk.command() @click.option('--generate_local_key', prompt=True, help='Specifies whether to generate a local database and key', default=False) @@ -36,8 +37,7 @@ def load_config_file(filename): @click.option('--description', prompt=True, help='Project Description') @click.option('--api_type', type=click.Choice(['web', 'rest', 'none']), prompt=True, help='API Type (web, rest, none)') @click.argument('name') -def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str): - +def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str, generate_local_key: bool): """ Creates a PFunk project Args: @@ -69,7 +69,8 @@ def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stag }, f, indent=4, sort_keys=True) open(f'{name}/__init__.py', 'x').close() with open(f'{name}/wsgi.py', 'x') as f: - f.write(wsgi_template.render(PFUNK_PROJECT=f'{name}.project.project')) + f.write(wsgi_template.render( + PFUNK_PROJECT=f'{name}.project.project')) with open(f'{name}/project.py', 'x') as f: f.write(project_template.render()) with open(f'{name}/collections.py', 'x') as f: @@ -81,9 +82,11 @@ def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stag q.create_database({'name': db_name}) ) key = client.query( - q.create_key({'database': q.database(db_name), 'role': 'admin'}) + q.create_key( + {'database': q.database(db_name), 'role': 'admin'}) ) - click.secho(f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') + click.secho( + f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') else: click.echo('There is already a project file in this directory.') @@ -113,6 +116,7 @@ def add_stage(stage_name: str, fauna_key: str, filename: str): else: click.echo('You have not run the init command yet.') + @pfunk.command() @click.option('--use_reloader', default=True) @click.option('--use_debugger', default=True) @@ -138,7 +142,8 @@ def local(hostname: str, port: int, wsgi: str, config_file: str, use_debugger: b 
sys.path.insert(0, os.getcwd()) wsgi_path = wsgi or f'{config.get("name")}.wsgi.app' app = import_util(wsgi_path) - run_simple(hostname, port, app, use_debugger=use_debugger, use_reloader=use_reloader) + run_simple(hostname, port, app, use_debugger=use_debugger, + use_reloader=use_reloader) @pfunk.command() @@ -191,6 +196,7 @@ def seed_keys(stage_name: str, config_path: str): f.write(key_template.render(keys=keys)) return keys_path + @pfunk.command() @click.option('--local_user', help='Specifies whether the user is local.', prompt=True, default=False) @click.option('--config_path', help='Configuration file path', default='pfunk.json') @@ -247,9 +253,11 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na project = import_util(project_path) perm_list = [] for i in project.collections: - perm_list.append(PermissionGroup(collection=i, permissions=['create', 'write', 'read', 'delete'])) + perm_list.append(PermissionGroup(collection=i, permissions=[ + 'create', 'write', 'read', 'delete'])) user.add_permissions(group, perm_list) + @pfunk.command() @click.option('--config_path', help='Configuration file path') @click.argument('stage_name') @@ -271,6 +279,28 @@ def deploy(stage_name: str, config_path: str): return d.deploy(stage_name) + +@pfunk.command() +@click.option('--config_path', help='Configuration file path', default='pfunk.json') +@click.option('--yaml_path', help='Dir to create yaml swagger file to', default='') +def generate_swagger(config_path: str, yaml_path: str): + """ Generates the swagger file of the project from a config json file + + Args: + config_path (str, optional): + dir of the json config file to use + yaml_path (str, optional): + dir to put the generated swagger file + + Returns: + + """ + config = load_config_file(config_path) + sys.path.insert(0, os.getcwd()) + project_path = f'{config.get("name")}.project.project' + project = import_util(project_path) + project.generate_swagger(yaml_dir=yaml_path, config_file=config_path) + + if __name__ == '__main__': pfunk() - diff --git a/pfunk/project.py b/pfunk/project.py index 321f6ed..431962e 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -294,9 +294,21 @@ def wsgi_app(self, environ, start_response): start_response(status_str, response.wsgi_headers) return [str.encode(response.body)] - def generate_swagger(self): + def generate_swagger(self, yaml_dir='', config_file='pfunk.json'): + """ Generates a swagger file that houses all endpoints + + Args: + yaml_dir (str, optional): + which directory to create the swagger yaml file + config_file (str, optional): + which directory to look for the config file + + Returns: + swagger file + """ swag = SwaggerDoc( collections=self.collections, - rules=[GraphQLView.url()]) - swag_file = swag.generate_swagger() + rules=[GraphQLView.url()], + config_file=config_file) + swag_file = swag.generate_swagger(dir=yaml_dir) return swag_file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2c5f02f..6335fbd 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -38,7 +38,7 @@ class SwaggerDoc(object): - def __init__(self, collections, rules=[]): + def __init__(self, collections, rules=[], config_file='pfunk.json'): """ Generates swagger doc. 
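
With the config file and output directory now parameterised, generation can be driven through the new generate-swagger CLI command or called directly on the project object; a sketch of the direct call, where the project module layout follows what `pfunk init` scaffolds and the paths are illustrative:

    from myapp.project import project   # hypothetical project module created by `pfunk init`

    result = project.generate_swagger(yaml_dir='docs/', config_file='pfunk.json')
    # -> {'dir': 'docs/swagger.yaml', 'swagger_file': '<rendered YAML>'}
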
Details are going to be acquired from the collections The acquisition of the information needed for docs are as follows: @@ -64,6 +64,9 @@ def __init__(self, collections, rules=[]): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given collection doesn't have + config_file (str, optional): + directory of the config_file + Returns: swagger.yaml (yaml, required): Generated YAML file @@ -73,6 +76,7 @@ def __init__(self, collections, rules=[]): self.paths = [] self.definitions = [] self.responses = [] + self.config_file = config_file self._response_classes = [ 'response_class', 'not_found_class', @@ -100,10 +104,10 @@ def write_to_yaml(self, dir=''): swagger_file (str, required): the contents of the swagger yaml file """ - if not os.path.exists(f'pfunk.json'): + if not os.path.exists(self.config_file): raise Exception('Missing JSON Config file.') else: - with open(f'pfunk.json', 'r') as f: + with open(self.config_file, 'r') as f: data = json.loads(f.read()) proj_title = data.get('name') proj_desc = data.get('description', 'A Pfunk project') @@ -112,6 +116,10 @@ def write_to_yaml(self, dir=''): basePath = data.get('basePath', '/') schemes = ['https'] + if dir: + if not dir.endswith('/'): + dir = dir + "/" + info = sw.Info( title=proj_title, description=proj_desc, @@ -128,7 +136,8 @@ def write_to_yaml(self, dir=''): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + print( + 'There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { "dir": f'{dir}swagger.yaml', @@ -264,4 +273,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) \ No newline at end of file + return self.write_to_yaml(dir) From 4f23b851de3fd6e8a25195c25d8c58c97fcd28a9 Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Mon, 9 May 2022 21:41:21 -0400 Subject: [PATCH 035/214] added environment variable references for ReferenceFields and ManytoManyFields in pfunk.contrib.auth.collections --- pfunk/contrib/auth/collections.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 5a1bcb5..9a7930c 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -24,9 +24,6 @@ AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) - - - class Key(object): @classmethod @@ -122,6 +119,7 @@ class BaseUser(Collection): collection_roles = [Public, UserRole] non_public_fields = ['groups'] use_email_verification = True + group_class = env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group') # Views collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, ForgotPasswordChangeView] # Signals @@ -380,7 +378,8 @@ def permissions(self): class User(BaseUser): """ User that has permission capabilities. 
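
The intent of the env() lookups added here (and continued below) is to let a project substitute its own Group collection; a hedged sketch, where the module path and subclass are hypothetical and GROUP_COLLECTION would be exported before the auth collections are imported. The import path reflects the module layout as of this patch.

    # GROUP_COLLECTION=myapp.collections.CustomGroup  (set in the environment)
    from pfunk.contrib.auth.collections import Group

    class CustomGroup(Group):
        """Project-specific group; picked up wherever GROUP_COLLECTION is read."""
        pass
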
Extension of `BaseUser` """ - groups = ManyToManyField(Group, 'users_groups') + groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'), 'users_groups') + @classmethod def get_permissions(cls, ref, _token=None): @@ -388,7 +387,7 @@ def get_permissions(cls, ref, _token=None): def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ - return [Group.get(i.id(), _token=_token) for i in self.client(_token=_token).query( + return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( q.paginate(q.match('users_groups_by_user', self.ref)) ).get('data')] From 9a124332efb80650b818eb34dd524f7d3a0be70c Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Wed, 25 May 2022 23:56:47 -0400 Subject: [PATCH 036/214] Fixed problems associated with subclassed User and Group collections and auth functions and roles --- pfunk/__init__.py | 3 +- pfunk/cli.py | 21 +- pfunk/contrib/auth/collections/__init__.py | 103 +++ pfunk/contrib/auth/collections/common.py | 34 + pfunk/contrib/auth/collections/group.py | 16 + pfunk/contrib/auth/collections/group_user.py | 0 .../{collections.py => collections/user.py} | 184 +---- pfunk/contrib/auth/resources.py | 2 +- pfunk/contrib/auth/views.py | 5 +- pfunk/contrib/ecommerce/collections.py | 13 +- pfunk/contrib/ecommerce/resources.py | 2 +- pfunk/contrib/ecommerce/views.py | 14 +- pfunk/contrib/email/base.py | 7 +- pfunk/contrib/email/ses.py | 5 +- pfunk/contrib/generic.py | 35 +- pfunk/contrib/templates.py | 3 +- pfunk/exceptions.py | 4 +- pfunk/fields.py | 20 +- pfunk/project.py | 12 +- pfunk/queryset.py | 2 +- pfunk/resources.py | 37 +- pfunk/template.py | 2 +- pfunk/testcase.py | 14 +- pfunk/tests/__init__.py | 9 +- pfunk/tests/test_auth.py | 7 +- pfunk/tests/test_collection.py | 4 - pfunk/tests/test_crud.py | 13 +- pfunk/tests/test_deployment.py | 12 +- pfunk/tests/test_email.py | 13 +- pfunk/tests/test_jwt.py | 6 +- pfunk/tests/test_project.py | 3 +- pfunk/tests/test_resources.py | 8 +- pfunk/tests/test_web_change_password.py | 9 +- pfunk/tests/test_web_crud.py | 8 +- pfunk/tests/test_web_forgot_password.py | 27 +- pfunk/tests/test_web_login.py | 6 +- pfunk/tests/test_web_signup.py | 7 +- pfunk/tests/test_web_stripe.py | 40 +- pfunk/utils/deploy.py | 5 +- pfunk/utils/json_utils.py | 2 +- pfunk/utils/publishing.py | 6 +- pfunk/web/request.py | 6 +- pfunk/web/response.py | 4 +- pfunk/web/views/base.py | 5 +- pfunk/web/views/graphql.py | 18 +- pfunk/web/views/json.py | 4 +- poetry.lock | 693 ++++++++++-------- 47 files changed, 774 insertions(+), 679 deletions(-) create mode 100644 pfunk/contrib/auth/collections/__init__.py create mode 100644 pfunk/contrib/auth/collections/common.py create mode 100644 pfunk/contrib/auth/collections/group.py create mode 100644 pfunk/contrib/auth/collections/group_user.py rename pfunk/contrib/auth/{collections.py => collections/user.py} (63%) diff --git a/pfunk/__init__.py b/pfunk/__init__.py index 8568372..56ad1d9 100644 --- a/pfunk/__init__.py +++ b/pfunk/__init__.py @@ -5,8 +5,9 @@ .. 
include:: ../CONTRIBUTE.md """ __docformat__ = "google" + +from .client import FaunaClient from .collection import Collection, Enum from .fields import (StringField, IntegerField, DateField, DateTimeField, BooleanField, FloatField, EmailField, EnumField, ReferenceField, ManyToManyField, SlugField) from .project import Project -from .client import FaunaClient diff --git a/pfunk/cli.py b/pfunk/cli.py index 1261fa0..bdae009 100644 --- a/pfunk/cli.py +++ b/pfunk/cli.py @@ -1,14 +1,12 @@ -import click import json import os import sys -import datetime -from jinja2 import TemplateNotFound +import click from valley.utils import import_util from werkzeug.serving import run_simple -from pfunk.client import FaunaClient, q +from pfunk.client import FaunaClient, q from pfunk.contrib.auth.collections import Group, PermissionGroup from pfunk.exceptions import DocNotFound from pfunk.template import wsgi_template, project_template, collections_templates, key_template @@ -25,6 +23,7 @@ def load_config_file(filename): config = json.load(f) return config + @pfunk.command() @click.option('--generate_local_key', prompt=True, help='Specifies whether to generate a local database and key', default=False) @@ -70,7 +69,8 @@ def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stag with open(f'{name}/collections.py', 'x') as f: f.write(collections_templates.render()) if generate_local_key: - client = FaunaClient(secret='secret') + domain = click.prompt('Please enter your local Fauna Docker hostname.', default='fauna') + client = FaunaClient(secret='secret', scheme='http') db_name = f'{name}-local' client.query( q.create_database({'name': db_name}) @@ -108,6 +108,7 @@ def add_stage(stage_name: str, fauna_key: str, filename: str): else: click.echo('You have not run the init command yet.') + @pfunk.command() @click.option('--use_reloader', default=True) @click.option('--use_debugger', default=True) @@ -158,7 +159,6 @@ def publish(stage_name: str, project_path: str, config_path: str, publish_locall project_path = f'{config.get("name")}.project.project' project = import_util(project_path) if not publish_locally: - secret = config['stages'][stage_name]['fauna_secret'] os.environ['FAUNA_SECRET'] = secret project.publish() @@ -186,6 +186,7 @@ def seed_keys(stage_name: str, config_path: str): f.write(key_template.render(keys=keys)) return keys_path + @pfunk.command() @click.option('--local_user', help='Specifies whether the user is local.', prompt=True, default=False) @click.option('--config_path', help='Configuration file path', default='pfunk.json') @@ -197,7 +198,8 @@ def seed_keys(stage_name: str, config_path: str): @click.option('--last_name', prompt=True, help='Last Name') @click.option('--group_slug', prompt=True, help='User Group Slug', default=None) @click.argument('stage_name') -def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_name: str, email: str, password: str, username: str, +def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_name: str, email: str, password: str, + username: str, project_path: str, config_path: str, local_user: bool): """ Create an admin user in the project's Fauna user collection. 
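
Further below, the command finishes by granting the new admin full rights on every project collection; condensed, that step amounts to the following, where `user`, `group`, and `project` are the objects the command has just created or imported:

    from pfunk.contrib.auth.collections import PermissionGroup

    perm_list = [PermissionGroup(collection=c, permissions=['create', 'write', 'read', 'delete'])
                 for c in project.collections]
    user.add_permissions(group, perm_list)
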
@@ -218,7 +220,7 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na """ config = load_config_file(config_path) secret = config['stages'][stage_name]['fauna_secret'] - User = import_util('pfunk.contrib.auth.collections.User') + User = import_util('pfunk.contrib.auth.collections.user.User') if not local_user: os.environ['FAUNA_SECRET'] = secret @@ -245,6 +247,7 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na perm_list.append(PermissionGroup(collection=i, permissions=['create', 'write', 'read', 'delete'])) user.add_permissions(group, perm_list) + @pfunk.command() @click.option('--config_path', help='Configuration file path') @click.argument('stage_name') @@ -266,6 +269,6 @@ def deploy(stage_name: str, config_path: str): return d.deploy(stage_name) + if __name__ == '__main__': pfunk() - diff --git a/pfunk/contrib/auth/collections/__init__.py b/pfunk/contrib/auth/collections/__init__.py new file mode 100644 index 0000000..394c5f4 --- /dev/null +++ b/pfunk/contrib/auth/collections/__init__.py @@ -0,0 +1,103 @@ +import datetime +import json +import random +import uuid + +import jwt +from cryptography.fernet import Fernet +from dateutil import tz +from envs import env +from jwt import ExpiredSignatureError +from valley.utils import import_util +from werkzeug.utils import cached_property + +from pfunk import Collection +from pfunk.exceptions import Unauthorized + + +class Key(object): + + @classmethod + def create_keys(cls): + c = cls() + keys = {} + for i in range(10): + kid = str(uuid.uuid4()) + k = {'signature_key': Fernet.generate_key().decode(), 'payload_key': Fernet.generate_key().decode(), + 'kid': kid} + keys[kid] = k + return keys + + @classmethod + def import_keys(cls): + try: + keys = import_util(env('KEY_MODULE', 'bad.import')) + except ImportError: + keys = {} + return keys + + @classmethod + def get_keys(cls): + keys = cls.import_keys() + return list(keys.values()) + + @classmethod + def get_key(cls): + + return random.choice(cls.get_keys()) + + @classmethod + def create_jwt(cls, secret_claims): + + key = cls.get_key() + pay_f = Fernet(key.get('payload_key')) + gmt = tz.gettz('GMT') + now = datetime.datetime.now(tz=gmt) + exp = now + datetime.timedelta(days=1) + payload = { + 'iat': now.timestamp(), + 'exp': exp.timestamp(), + 'nbf': now.timestamp(), + 'iss': env('PROJECT_NAME', 'pfunk'), + 'til': pay_f.encrypt(json.dumps(secret_claims).encode()).decode() + } + return jwt.encode(payload, key.get('signature_key'), algorithm="HS256", headers={'kid': key.get('kid')}), exp + + @classmethod + def decrypt_jwt(cls, encoded): + headers = jwt.get_unverified_header(encoded) + keys = cls.import_keys() + key = keys.get(headers.get('kid')) + try: + decoded = jwt.decode(encoded, key.get('signature_key'), algorithms="HS256", verify=True, + options={"require": ["iat", "exp", "nbf", 'iss', 'til']}) + except ExpiredSignatureError: + raise Unauthorized('Unauthorized') + pay_f = Fernet(key.get('payload_key').encode()) + k = pay_f.decrypt(decoded.get('til').encode()) + return json.loads(k.decode()) + + +class PermissionGroup(object): + """ List of permission that a user/object has + + Attributes: + collection (`pfunk.collection.Collection`, required): + Collection to allow permissions + permission (list, required): + What operations should be allowed `['create', 'read', 'delete', 'write']` + """ + valid_actions: list = ['create', 'read', 'delete', 'write'] + + def __init__(self, collection: Collection, permissions: list): + if not 
issubclass(collection, Collection): + raise ValueError( + 'Permission class requires a Collection class as the first argument.') + self.collection = collection + self._permissions = permissions + self.collection_name = self.collection.get_class_name() + + @cached_property + def permissions(self): + """ Lists all collections and its given permissions """ + return [f'{self.collection_name}-{i}'.lower() for i in self._permissions if i in self.valid_actions] diff --git a/pfunk/contrib/auth/collections/common.py b/pfunk/contrib/auth/collections/common.py new file mode 100644 index 0000000..2aa07df --- /dev/null +++ b/pfunk/contrib/auth/collections/common.py @@ -0,0 +1,34 @@ +from envs import env + +from pfunk import ReferenceField, Collection +from pfunk.fields import ListField + + +class UserGroups(Collection): + """ Many-to-many collection of the user-group relationship + + The native fauna-way of holding many-to-many relationship + is to only have the ID of the 2 object. Here in pfunk, we + leverage the flexibility of the collection to have another + field, which is `permissions`, this field holds the capablities + of a user, allowing us to add easier permission handling. + Instead of manually going to roles and adding individual + collections which can be painful in long term. + + Attributes: + collection_name (str): + Name of the collection in Fauna + userID (str): + Fauna ref of user that is tied to the group + groupID (str): + Fauna ref of a collection that is tied with the user + permissions (str[]): + List of permissions, `['create', 'read', 'delete', 'write']` + """ + collection_name = 'users_groups' + userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User')) + groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" diff --git a/pfunk/contrib/auth/collections/group.py b/pfunk/contrib/auth/collections/group.py new file mode 100644 index 0000000..4ae5a96 --- /dev/null +++ b/pfunk/contrib/auth/collections/group.py @@ -0,0 +1,16 @@ +from envs import env + +from pfunk.collection import Collection +from pfunk.fields import SlugField, ManyToManyField, StringField + + +class Group(Collection): + """ Group collection that the user belongs to """ + name = StringField(required=True) + slug = SlugField(unique=True, required=False) + users = ManyToManyField( + env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User'), + relation_name='users_groups') + + def __unicode__(self): + return self.name # pragma: no cover diff --git a/pfunk/contrib/auth/collections/group_user.py b/pfunk/contrib/auth/collections/group_user.py new file mode 100644 index 0000000..e69de29 diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections/user.py similarity index 63% rename from pfunk/contrib/auth/collections.py rename to pfunk/contrib/auth/collections/user.py index 9a7930c..6fca5d3 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections/user.py @@ -1,103 +1,23 @@ -import datetime -import json -import random import uuid -import jwt -from cryptography.fernet import Fernet -from dateutil import tz from envs import env -from faunadb.errors import BadRequest, NotFound -from jwt import ExpiredSignatureError +from faunadb.errors import BadRequest from valley.exceptions import ValidationException from valley.utils import import_util -from werkzeug.utils import cached_property from 
pfunk.client import q from pfunk.collection import Collection, Enum +from pfunk.contrib.auth.collections import Key from pfunk.contrib.auth.resources import LoginUser, UpdatePassword, Public, UserRole, LogoutUser -from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView +from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, \ + UpdatePasswordView, ForgotPasswordView from pfunk.contrib.email.base import send_email -from pfunk.exceptions import LoginFailed, DocNotFound, Unauthorized -from pfunk.fields import EmailField, SlugField, ManyToManyField, ListField, ReferenceField, StringField, EnumField +from pfunk.exceptions import LoginFailed, DocNotFound +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) -class Key(object): - - @classmethod - def create_keys(cls): - c = cls() - keys = {} - for i in range(10): - kid = str(uuid.uuid4()) - k = {'signature_key': Fernet.generate_key().decode(), 'payload_key': Fernet.generate_key().decode(), - 'kid': kid} - keys[kid] = k - return keys - - @classmethod - def import_keys(cls): - try: - keys = import_util(env('KEY_MODULE', 'bad.import')) - except ImportError: - keys = {} - return keys - - @classmethod - def get_keys(cls): - keys = cls.import_keys() - return list(keys.values()) - - @classmethod - def get_key(cls): - - return random.choice(cls.get_keys()) - - @classmethod - def create_jwt(cls, secret_claims): - - key = cls.get_key() - pay_f = Fernet(key.get('payload_key')) - gmt = tz.gettz('GMT') - now = datetime.datetime.now(tz=gmt) - exp = now + datetime.timedelta(days=1) - payload = { - 'iat': now.timestamp(), - 'exp': exp.timestamp(), - 'nbf': now.timestamp(), - 'iss': env('PROJECT_NAME', 'pfunk'), - 'til': pay_f.encrypt(json.dumps(secret_claims).encode()).decode() - } - return jwt.encode(payload, key.get('signature_key'), algorithm="HS256", headers={'kid': key.get('kid')}), exp - - @classmethod - def decrypt_jwt(cls, encoded): - headers = jwt.get_unverified_header(encoded) - keys = cls.import_keys() - key = keys.get(headers.get('kid')) - try: - decoded = jwt.decode(encoded, key.get('signature_key'), algorithms="HS256", verify=True, - options={"require": ["iat", "exp", "nbf", 'iss', 'til']}) - except ExpiredSignatureError: - raise Unauthorized('Unauthorized') - pay_f = Fernet(key.get('payload_key').encode()) - k = pay_f.decrypt(decoded.get('til').encode()) - return json.loads(k.decode()) - - -class Group(Collection): - """ Group collection that the user belongs to """ - name = StringField(required=True) - slug = SlugField(unique=True, required=False) - users = ManyToManyField( - 'pfunk.contrib.auth.collections.User', relation_name='users_groups') - - def __unicode__(self): - return self.name # pragma: no cover - - def attach_verification_key(doc): if not doc.ref and doc.use_email_verification: doc.attach_verification_key() @@ -119,9 +39,10 @@ class BaseUser(Collection): collection_roles = [Public, UserRole] non_public_fields = ['groups'] use_email_verification = True - group_class = env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group') + group_class = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) # Views - collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, ForgotPasswordChangeView] + 
collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, + ForgotPasswordChangeView] # Signals pre_create_signals = [attach_verification_key] post_create_signals = [send_verification_email] @@ -148,7 +69,7 @@ def login(cls, username, password, _token=None): try: return c.client(_token=_token).query( q.call("login_user", { - "username": username, "password": password}) + "username": username, "password": password}) ) except BadRequest: raise LoginFailed( @@ -162,6 +83,7 @@ def logout(cls, _token=None): q.call("logout_user") ) + def permissions(self, _token=None): return [] @@ -245,7 +167,7 @@ def send_verification_email(self, from_email=None, verification_type='signup'): @classmethod def forgot_password(cls, email): - """ Sends forgot password email to let user + """ Sends forgot password email to let user use that link to reset their password """ user = cls.get_by('unique_User_email', email) @@ -321,65 +243,11 @@ def __unicode__(self): return self.username # pragma: no cover -class UserGroups(Collection): - """ Many-to-many collection of the user-group relationship - - The native fauna-way of holding many-to-many relationship - is to only have the ID of the 2 object. Here in pfunk, we - leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities - of a user, allowing us to add easier permission handling. - Instead of manually going to roles and adding individual - collections which can be painful in long term. - - Attributes: - collection_name (str): - Name of the collection in Fauna - userID (str): - Fauna ref of user that is tied to the group - groupID (str): - Fauna ref of a collection that is tied with the user - permissions (str[]): - List of permissions, `['create', 'read', 'delete', 'write']` - """ - collection_name = 'users_groups' - userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) - groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" - - -class PermissionGroup(object): - """ List of permission that a user/object has - - Attributes: - collection (`pfunk.collection.Collection`, required): - Collection to allow permissions - permission (list, required): - What operations should be allowed `['create', 'read', 'delete', 'write']` - """ - valid_actions: list = ['create', 'read', 'delete', 'write'] - - def __init__(self, collection: Collection, permissions: list): - if not issubclass(collection, Collection): - raise ValueError( - 'Permission class requires a Collection class as the first argument.') - self.collection = collection - self._permissions = permissions - self.collection_name = self.collection.get_class_name() - - @cached_property - def permissions(self): - """ Lists all collections and its given permissions """ - return [f'{self.collection_name}-{i}'.lower() for i in self._permissions if i in self.valid_actions] - - class User(BaseUser): + user_group_class = import_util('pfunk.contrib.auth.collections.common.UserGroups') + group_class = import_util('pfunk.contrib.auth.collections.group.Group') """ User that has permission capabilities. 
Extension of `BaseUser` """ - groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'), 'users_groups') - + groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group'), 'users_groups') @classmethod def get_permissions(cls, ref, _token=None): @@ -407,8 +275,8 @@ def permissions(self, _token=None): """ perm_list = [] for i in self.get_groups(_token=_token): - ug = UserGroups.get_index('users_groups_by_group_and_user', [ - i.ref, self.ref], _token=_token) + ug = self.user_group_class.get_index('users_groups_by_group_and_user', [ + i.ref, self.ref], _token=_token) for user_group in ug: p = [] if isinstance(user_group.permissions, list): @@ -418,24 +286,24 @@ def permissions(self, _token=None): return perm_list def add_permissions(self, group, permissions: list, _token=None): - """ Adds permission for the user - - Adds permission by extending the list of permission - in the many-to-many collection of the user, i.e. in + """ Adds permission for the user + + Adds permission by extending the list of permission + in the many-to-many collection of the user, i.e. in the `UserGroup` collection. Args: - group (str, required): + group (str, required): Group collection of the User permissions (list, required): Permissions to give, `['create', 'read', 'delete', 'write']` Just add the operation you need _token (str, required): auth token of the user - + Returns: UserGroup (`contrib.auth.collections.UserGroup`): - `UserGroup` instance which has the added permissions + `UserGroup` instance which has the added permissions of the user """ perm_list = [] @@ -443,9 +311,9 @@ def add_permissions(self, group, permissions: list, _token=None): perm_list.extend(i.permissions) try: - user_group = UserGroups.get_by('users_groups_by_group_and_user', terms=[group.ref, self.ref]) + user_group = self.user_group_class.get_by('users_groups_by_group_and_user', terms=[group.ref, self.ref]) except DocNotFound: - user_group = UserGroups.create(userID=self.ref, groupID=group.ref, permissions=perm_list) + user_group = self.user_group_class.create(userID=self.ref, groupID=group.ref, permissions=perm_list) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 40a560f..ab0c65a 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,5 +1,5 @@ from pfunk.client import q -from pfunk.resources import Function, Role, Index +from pfunk.resources import Function, Role class AuthFunction(Function): diff --git a/pfunk/contrib/auth/views.py b/pfunk/contrib/auth/views.py index fbfea54..ee3de70 100644 --- a/pfunk/contrib/auth/views.py +++ b/pfunk/contrib/auth/views.py @@ -1,7 +1,6 @@ from abc import ABC from envs import env -from werkzeug.http import http_date from werkzeug.routing import Rule from pfunk.web.views.base import ActionMixin @@ -118,8 +117,8 @@ class ForgotPasswordChangeView(ActionMixin, JSONAuthView): def get_query(self): kwargs = self.get_query_kwargs() return self.collection.verify_email( - str(kwargs['verification_key']), - verify_type='forgot', + str(kwargs['verification_key']), + verify_type='forgot', password=kwargs['password']) diff --git a/pfunk/contrib/ecommerce/collections.py b/pfunk/contrib/ecommerce/collections.py index 569d58e..a0ea7fb 100644 --- a/pfunk/contrib/ecommerce/collections.py +++ b/pfunk/contrib/ecommerce/collections.py @@ -2,15 +2,14 @@ from envs import env from pfunk.collection import 
Collection -from pfunk.contrib.auth.collections import User, Group -from pfunk.exceptions import DocNotFound -from pfunk.fields import EmailField, SlugField, ManyToManyField, ListField, ReferenceField, StringField, EnumField, FloatField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, Public, UserRole -from pfunk.contrib.ecommerce.resources import StripePublic +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole from pfunk.contrib.ecommerce.views import ListStripePackage, DetailStripePackage +from pfunk.exceptions import DocNotFound +from pfunk.fields import ReferenceField, StringField, FloatField from pfunk.web.views.json import CreateView, UpdateView, DeleteView - stripe.api_key = env('STRIPE_API_KEY') @@ -38,7 +37,7 @@ def __unicode__(self): @property def stripe_price(self): - return int(self.price*100) + return int(self.price * 100) class StripeCustomer(Collection): diff --git a/pfunk/contrib/ecommerce/resources.py b/pfunk/contrib/ecommerce/resources.py index da8460a..ebd0729 100644 --- a/pfunk/contrib/ecommerce/resources.py +++ b/pfunk/contrib/ecommerce/resources.py @@ -1,5 +1,5 @@ from pfunk.client import q -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, Public, UserRole +from pfunk.contrib.auth.resources import Public class StripePublic(Public): diff --git a/pfunk/contrib/ecommerce/views.py b/pfunk/contrib/ecommerce/views.py index e48813e..71b80d8 100644 --- a/pfunk/contrib/ecommerce/views.py +++ b/pfunk/contrib/ecommerce/views.py @@ -1,19 +1,17 @@ import collections import json +from json import JSONDecodeError + +import bleach import requests import stripe -import bleach from envs import env -from datetime import datetime -from json import JSONDecodeError from jinja2 import Environment, BaseLoader -from pfunk.contrib.email import ses -from pfunk.exceptions import DocNotFound -from pfunk.web.views.json import JSONView, ListView, DetailView, CreateView from pfunk.contrib.email.ses import SESBackend -from pfunk.contrib.auth.collections import Group, User +from pfunk.exceptions import DocNotFound from pfunk.web.views.base import ActionMixin +from pfunk.web.views.json import JSONView, ListView, DetailView stripe.api_key = env('STRIPE_API_KEY') STRIPE_PUBLISHABLE_KEY = env('STRIPE_PUBLISHABLE_KEY') @@ -44,7 +42,7 @@ class CheckoutView(DetailView): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) customer = self.collection.objects.get_or_create_customer( - self.request.user) # `StripeCustomer` collection + self.request.user) # `StripeCustomer` collection session = stripe.checkout.Session.create( payment_method_types=['card'], customer=customer.customer_id, diff --git a/pfunk/contrib/email/base.py b/pfunk/contrib/email/base.py index 452da44..a5c87a8 100644 --- a/pfunk/contrib/email/base.py +++ b/pfunk/contrib/email/base.py @@ -10,7 +10,8 @@ class EmailBackend(object): """ Base email backend class """ - def get_template(self, template:str): + + def get_template(self, template: str): """ Get the template based on the template location string Args: @@ -81,5 +82,5 @@ def send_email(subject: str, to_emails: list, html_template: str = None, txt_tem with warnings.catch_warnings(): warnings.simplefilter('ignore', category=ResourceWarning) email_backend().send_email(subject=subject, to_emails=to_emails, html_template=html_template, - 
txt_template=txt_template, from_email=from_email, cc_emails=cc_emails, - bcc_emails=bcc_emails, fail_silently=fail_silently, **kwargs) \ No newline at end of file + txt_template=txt_template, from_email=from_email, cc_emails=cc_emails, + bcc_emails=bcc_emails, fail_silently=fail_silently, **kwargs) diff --git a/pfunk/contrib/email/ses.py b/pfunk/contrib/email/ses.py index 20077e5..fd181d2 100644 --- a/pfunk/contrib/email/ses.py +++ b/pfunk/contrib/email/ses.py @@ -1,5 +1,6 @@ import boto3 from envs import env + from pfunk.contrib.email.base import EmailBackend @@ -47,5 +48,5 @@ def send_email(self, subject: str, to_emails: list, html_template: str = None, t 'Body': self.get_body_kwargs(html_template=html_template, txt_template=txt_template, **kwargs) } ) - - return res \ No newline at end of file + + return res diff --git a/pfunk/contrib/generic.py b/pfunk/contrib/generic.py index a0b00b9..204b541 100644 --- a/pfunk/contrib/generic.py +++ b/pfunk/contrib/generic.py @@ -1,13 +1,12 @@ -from pfunk.resources import Function, Index from pfunk.client import q - +from pfunk.resources import Function class GenericFunction(Function): action = 'create' def get_role(self): - return None # pragma: no cover + return None # pragma: no cover def get_name(self): return f"{self.action}_{self.collection.get_class_name()}" @@ -24,19 +23,19 @@ def get_name(self): def get_body(self): return q.query( q.lambda_(["input"], - q.map_( - q.lambda_(['ref'], - q.get(q.var('ref')) - ), - q.paginate( - q.match(q.index(self.collection.all_index_name())), - q.select('size', q.var('input')) - ) - ) - ) + q.map_( + q.lambda_(['ref'], + q.get(q.var('ref')) + ), + q.paginate( + q.match(q.index(self.collection.all_index_name())), + q.select('size', q.var('input')) + ) + ) + ) ) - - + + class GenericCreate(GenericFunction): def get_body(self): @@ -68,13 +67,13 @@ def get_body(self): )) - class GenericDelete(GenericFunction): action = 'delete' def get_body(self): return q.query( q.lambda_(["input"], - q.delete(q.ref(q.collection(self.collection.get_collection_name()), q.select('id', q.var("input")))) + q.delete( + q.ref(q.collection(self.collection.get_collection_name()), q.select('id', q.var("input")))) ) - ) \ No newline at end of file + ) diff --git a/pfunk/contrib/templates.py b/pfunk/contrib/templates.py index e86940a..cbb4c9a 100644 --- a/pfunk/contrib/templates.py +++ b/pfunk/contrib/templates.py @@ -2,5 +2,4 @@ from jinja2 import Environment from jinja2.loaders import FileSystemLoader - -temp_env = Environment(loader=FileSystemLoader(env('TEMPLATE_ROOT_DIR'))) \ No newline at end of file +temp_env = Environment(loader=FileSystemLoader(env('TEMPLATE_ROOT_DIR'))) diff --git a/pfunk/exceptions.py b/pfunk/exceptions.py index fc128e1..9625b75 100644 --- a/pfunk/exceptions.py +++ b/pfunk/exceptions.py @@ -1,5 +1,3 @@ - - class LoginFailed(Exception): """Exception raised when an attempt to login fails.""" pass @@ -22,4 +20,4 @@ class Unauthorized(Exception): class GraphQLError(Exception): """Graphql SyntaxError""" - pass \ No newline at end of file + pass diff --git a/pfunk/fields.py b/pfunk/fields.py index d0e06e3..e376333 100644 --- a/pfunk/fields.py +++ b/pfunk/fields.py @@ -5,11 +5,10 @@ from valley.properties import CharProperty, IntegerProperty, DateTimeProperty, DateProperty, FloatProperty, \ BooleanProperty, EmailProperty, SlugProperty, BaseProperty, ForeignProperty, ForeignListProperty, ListProperty from valley.utils import import_util +from valley.validators import ChoiceValidator, ForeignValidator -from 
valley.validators import Validator, ChoiceValidator, ForeignValidator - -from pfunk.collection import Enum from pfunk.client import Ref +from pfunk.collection import Enum class ChoiceListValidator(ChoiceValidator): @@ -113,6 +112,7 @@ def validate(self, value, key): raise ValidationException('{0}: This value ({1}) should be an instance of {2}.'.format( key, value, self.foreign_class.__name__)) + class ReferenceField(GraphQLMixin, ForeignProperty): def get_validators(self): @@ -153,7 +153,7 @@ def validate(self, value, key): self.foreign_class = import_util(self.foreign_class) if value: for obj in value: - if not isinstance(obj,self.foreign_class): + if not isinstance(obj, self.foreign_class): raise ValidationException( '{0}: This value ({1}) should be an instance of {2}.'.format( key, obj, self.foreign_class.__name__)) @@ -162,7 +162,8 @@ def validate(self, value, key): class ManyToManyField(GraphQLMixin, ForeignListProperty): relation_field = True - def __init__(self, foreign_class, relation_name, return_type=None,return_prop=None,**kwargs): + def __init__(self, foreign_class, relation_name, return_type=None, return_prop=None, **kwargs): + self.foreign_class = foreign_class self.relation_name = relation_name super(ManyToManyField, self).__init__(foreign_class, return_type=return_type, return_prop=return_prop, **kwargs) @@ -187,8 +188,13 @@ def get_python_value(self, value): c.ref = i c._lazied = True ra(c) - if isinstance(i, self.foreign_class): - ra(i) + + try: + if isinstance(i, self.foreign_class): + ra(i) + except TypeError: + if f'{i.__class__.__module__}.{i.__class__.__name__}' == self.foreign_class: + ra(i) return ref_list def get_db_value(self, value): diff --git a/pfunk/project.py b/pfunk/project.py index e3c3a85..7e688d4 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -1,14 +1,11 @@ import logging - -import requests from io import BytesIO +import requests from envs import env - from faunadb.client import FaunaClient from jinja2 import Template from valley.contrib import Schema - from valley.properties import CharProperty, ForeignProperty from valley.utils import import_util from werkzeug import Request as WerkzeugRequest @@ -180,10 +177,13 @@ def publish(self, mode: str = 'merge') -> int: test_mode = env('PFUNK_TEST_MODE', False, var_type='boolean') if not test_mode: print('GraphQL Schema Imported Successfully!!') # pragma: no cover + else: + print('Error Publishing GraphQL!!') + print('----------------------------------------') + print(resp.content) + return for col in set(self.collections): col.publish() - if resp.status_code != 200: - print(resp.content) return resp.status_code def unpublish(self) -> None: diff --git a/pfunk/queryset.py b/pfunk/queryset.py index e9195cc..5c49e96 100644 --- a/pfunk/queryset.py +++ b/pfunk/queryset.py @@ -25,4 +25,4 @@ def __len__(self): return len(self.data) def __getitem__(self, x): - return self.data[x] \ No newline at end of file + return self.data[x] diff --git a/pfunk/resources.py b/pfunk/resources.py index a4e9058..c31f98e 100644 --- a/pfunk/resources.py +++ b/pfunk/resources.py @@ -2,8 +2,8 @@ from faunadb.query import query -from pfunk.utils.publishing import create_or_update_function, create_or_update_role, create_or_pass_index from pfunk.client import q +from pfunk.utils.publishing import create_or_update_function, create_or_update_role, create_or_pass_index class Resource(object): @@ -52,20 +52,20 @@ def get_payload(self) -> dict: return payload_dict def publish(self): - raise NotImplementedError # pragma: no cover + raise 
NotImplementedError # pragma: no cover def unpublish(self): - raise NotImplementedError # pragma: no cover + raise NotImplementedError # pragma: no cover def get_body(self): - raise NotImplementedError # pragma: no cover + raise NotImplementedError # pragma: no cover class Function(Resource): def get_role(self): """Gets the role to use when calling the function.""" - return None # pragma: no cover + return None # pragma: no cover def publish(self): """ @@ -88,7 +88,7 @@ class Role(Resource): user_table: str = None def get_lambda(self, resource_type): - return # pragma: no cover + return # pragma: no cover def get_payload(self) -> dict: """ @@ -98,12 +98,14 @@ def get_payload(self) -> dict: """ payload_dict = { "name": self.get_name(), - "membership": self.get_membership(), "privileges": self.get_privileges(), } data = self.get_data() + membership = self.get_membership() if data: payload_dict['data'] = data + if membership: + payload_dict['membership'] = membership return payload_dict def get_data(self) -> dict: @@ -112,10 +114,10 @@ def get_data(self) -> dict: Returns: dict """ - return None # pragma: no cover + return None # pragma: no cover def get_privileges(self): - raise NotImplementedError # pragma: no cover + raise NotImplementedError # pragma: no cover def get_membership_lambda(self): """ @@ -125,10 +127,10 @@ def get_membership_lambda(self): """ return q.query( q.lambda_(['object_ref'], - q.equals( - q.select('account_status', q.select('data', q.get(q.var('object_ref')))), - "ACTIVE" - ) + q.equals( + q.select('account_status', q.select('data', q.get(q.var('object_ref')))), + "ACTIVE" + ) )) def get_membership(self) -> dict: @@ -137,10 +139,13 @@ def get_membership(self) -> dict: Returns: dict """ - return { + membership = self.get_membership_lambda() + payload_dict = { 'resource': q.collection(self.user_table or self.collection.get_collection_name()), - 'predicate': self.get_membership_lambda() } + if membership: + payload_dict['predicate'] = self.get_membership_lambda() + return payload_dict def publish(self): """ @@ -189,7 +194,6 @@ def get_kwargs(self) -> dict: kwargs = {'name': self.name, 'source': q.collection(self.source), } if self.terms: - kwargs['terms'] = self.terms if self.values: kwargs['values'] = self.values @@ -246,4 +250,3 @@ def get_body(self): ) ) ) - diff --git a/pfunk/template.py b/pfunk/template.py index a61f68f..090ea9c 100644 --- a/pfunk/template.py +++ b/pfunk/template.py @@ -56,4 +56,4 @@ key_template = Template(""" KEYS = {{keys}} -""") \ No newline at end of file +""") diff --git a/pfunk/testcase.py b/pfunk/testcase.py index eb6d022..054bbab 100644 --- a/pfunk/testcase.py +++ b/pfunk/testcase.py @@ -1,10 +1,10 @@ +import os import unittest - import uuid -import os from valley.utils import import_util from werkzeug.test import Client + from pfunk import Project from pfunk.client import FaunaClient, q from pfunk.template import key_template @@ -41,8 +41,13 @@ class CollectionTestCase(PFunkTestCase): def setUp(self) -> None: super(CollectionTestCase, self).setUp() self.project = Project() - - self.project.add_resources(self.collections) + coll = [] + for i in self.collections: + if isinstance(i, str): + coll.append(import_util(i)) + else: + coll.append(i) + self.project.add_resources(coll) self.project.publish() @@ -59,7 +64,6 @@ def setUp(self) -> None: with open(self.keys_path, 'w+') as f: f.write(key_template.render(keys=keys)) - def tearDown(self) -> None: super(APITestCase, self).tearDown() if os.path.exists(self.keys_path): diff --git 
a/pfunk/tests/__init__.py b/pfunk/tests/__init__.py index 74257c0..936292b 100644 --- a/pfunk/tests/__init__.py +++ b/pfunk/tests/__init__.py @@ -1,7 +1,6 @@ from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField -from pfunk.resources import Index -from pfunk.contrib.auth.collections import User, Group from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole +from pfunk.resources import Index GENDER_PRONOUN = Enum(name='gender_pronouns', choices=['he', 'her', 'they']) @@ -32,7 +31,7 @@ class Person(Collection): last_name = StringField(required=True) gender_pronoun = EnumField(GENDER_PRONOUN) sport = ReferenceField(Sport) - group = ReferenceField(Group) + group = ReferenceField('pfunk.contrib.auth.collections.group.Group') def __unicode__(self): return f"{self.first_name} {self.last_name}" @@ -41,7 +40,7 @@ def __unicode__(self): class House(Collection): collection_roles = [GenericUserBasedRole] address = StringField(required=True) - user = ReferenceField(User) + user = ReferenceField('pfunk.contrib.auth.collections.user.User') def __unicode__(self): - return self.address \ No newline at end of file + return self.address diff --git a/pfunk/tests/test_auth.py b/pfunk/tests/test_auth.py index 33ec70a..afc8204 100644 --- a/pfunk/tests/test_auth.py +++ b/pfunk/tests/test_auth.py @@ -1,13 +1,16 @@ from faunadb.errors import PermissionDenied from pfunk.contrib.auth.collections import PermissionGroup -from pfunk.tests import User, Group, Sport, Person, House +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.exceptions import LoginFailed from pfunk.testcase import CollectionTestCase +from pfunk.tests import Sport, Person, House class AuthTestCase(CollectionTestCase): - collections = [User, Group, Sport, Person, House] + collections = [User, Group, + Sport, Person, House] def setUp(self) -> None: super(AuthTestCase, self).setUp() diff --git a/pfunk/tests/test_collection.py b/pfunk/tests/test_collection.py index 717b164..2692497 100644 --- a/pfunk/tests/test_collection.py +++ b/pfunk/tests/test_collection.py @@ -50,7 +50,3 @@ def test_get_unique_together(self): sport = Sport() sport.get_unique_together() self.assertEqual(len(sport.collection_indexes), 1) - - - - diff --git a/pfunk/tests/test_crud.py b/pfunk/tests/test_crud.py index c09c17e..b7abc64 100644 --- a/pfunk/tests/test_crud.py +++ b/pfunk/tests/test_crud.py @@ -1,6 +1,5 @@ -from faunadb.errors import PermissionDenied - -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import CollectionTestCase @@ -12,8 +11,8 @@ def setUp(self) -> None: self.managers = Group.create(name='Managers', slug='managers') self.power_users = Group.create(name='Power Users', slug='power-users') self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.managers]) - + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.managers]) def test_create_user(self): self.assertEqual(2, len(Group.all())) @@ -33,7 +32,3 @@ def test_update(self): self.user.save() u = User.get(self.user.ref.id()) self.assertEqual(u.username, 'test-c') - - - - diff --git a/pfunk/tests/test_deployment.py b/pfunk/tests/test_deployment.py index 587f9f0..c938b29 100644 --- a/pfunk/tests/test_deployment.py +++ 
b/pfunk/tests/test_deployment.py @@ -1,8 +1,9 @@ -from pfunk.contrib.auth.collections import Group, User -from pfunk.testcase import PFunkTestCase -from pfunk.project import Project from pfunk.client import q -from pfunk.tests import Sport, Person, User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User +from pfunk.project import Project +from pfunk.testcase import PFunkTestCase +from pfunk.tests import Sport, Person class DeploymentTestCase(PFunkTestCase): @@ -42,6 +43,3 @@ def test_project_publish(self): # functions self.project.publish() self.project.publish() - - - diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index 491a3a2..af42f6d 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ -1,13 +1,14 @@ import tempfile -from werkzeug.test import Client from unittest import mock -import os + from jinja2.exceptions import TemplateNotFound +from werkzeug.test import Client -from pfunk.tests import User, Group -from pfunk.testcase import APITestCase -from pfunk.contrib.email.ses import SESBackend +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.contrib.email.base import EmailBackend +from pfunk.contrib.email.ses import SESBackend +from pfunk.testcase import APITestCase class TestEmailBackend(APITestCase): @@ -22,7 +23,6 @@ def setUp(self) -> None: self.backend = EmailBackend() def test_get_template(self): - template = self.backend.get_template('email/email_template.html') # test jinja render if no exceptions template.render(unittest_value="random value") @@ -58,7 +58,6 @@ def setUp(self) -> None: @mock.patch('boto3.client') def test_send_email(self, mocked): - res = self.SES.send_email( subject="test", to_emails=["testemail@email.com"], diff --git a/pfunk/tests/test_jwt.py b/pfunk/tests/test_jwt.py index a9ff9b3..571a907 100644 --- a/pfunk/tests/test_jwt.py +++ b/pfunk/tests/test_jwt.py @@ -1,10 +1,8 @@ -from faunadb.errors import PermissionDenied - from pfunk.contrib.auth.collections import Key -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase -from pfunk.contrib.auth.collections import Key class AuthToken(APITestCase): diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index db859a9..5afb853 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -1,4 +1,5 @@ import unittest + from pfunk.project import Project from pfunk.tests import Person, Sport, GENDER_PRONOUN @@ -26,5 +27,3 @@ def test_render(self): self.assertTrue('type Person' in gql) self.assertTrue('type Sport' in gql) self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) - - diff --git a/pfunk/tests/test_resources.py b/pfunk/tests/test_resources.py index 05a7117..faf2f02 100644 --- a/pfunk/tests/test_resources.py +++ b/pfunk/tests/test_resources.py @@ -1,6 +1,8 @@ import unittest -from pfunk.tests import SimpleIndex + from pfunk.client import q +from pfunk.tests import SimpleIndex + class IndexTestCase(unittest.TestCase): @@ -20,9 +22,9 @@ def test_get_kwargs(self): self.assertEqual( self.index.get_kwargs(), { - 'name':'simple-index', + 'name': 'simple-index', 'source': q.collection('Project'), 'terms': ['name', 'slug'], 'unique': True } - ) \ No newline at end of file + ) diff --git 
a/pfunk/tests/test_web_change_password.py b/pfunk/tests/test_web_change_password.py index 85e6fc7..bdc2d7a 100644 --- a/pfunk/tests/test_web_change_password.py +++ b/pfunk/tests/test_web_change_password.py @@ -1,4 +1,5 @@ -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import APITestCase @@ -25,12 +26,12 @@ def test_update_password(self): headers={ "Authorization": self.token }) - + new_token, new_exp = User.api_login("test", "updated_password") self.assertIsNotNone(new_token) self.assertTrue(res.json['success']) - + def test_update_pass_wrong_current(self): """ Tests `pfunk.contrib.auth.views.UpdatePasswordView` throw an error if the current password given was wrong """ res = self.c.post('/user/update-password/', @@ -43,6 +44,6 @@ def test_update_pass_wrong_current(self): "Authorization": self.token }) expected = {'success': False, 'data': {'validation_errors': {'current_password': ' Password update failed.'}}} - + self.assertDictEqual(res.json, expected) self.assertFalse(res.json['success']) diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py index fe0fd2e..71123e6 100644 --- a/pfunk/tests/test_web_crud.py +++ b/pfunk/tests/test_web_crud.py @@ -1,7 +1,7 @@ from werkzeug.test import Client -from pfunk.tests import User, Group -from pfunk.exceptions import LoginFailed +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import APITestCase from pfunk.tests import House @@ -45,7 +45,7 @@ def test_create(self): self.assertTrue(res.json['success']) self.assertIn("the street somewhere", [ - house.address for house in House.all()]) + house.address for house in House.all()]) def test_update(self): self.assertNotIn("the updated street somewhere", [ @@ -59,7 +59,7 @@ def test_update(self): self.assertTrue(res.json['success']) self.assertIn("the updated street somewhere", [ - house.address for house in House.all()]) + house.address for house in House.all()]) def test_delete(self): res = self.c.delete(f'/house/delete/{self.house.ref.id()}/', diff --git a/pfunk/tests/test_web_forgot_password.py b/pfunk/tests/test_web_forgot_password.py index 4b81492..83c3e32 100644 --- a/pfunk/tests/test_web_forgot_password.py +++ b/pfunk/tests/test_web_forgot_password.py @@ -1,6 +1,7 @@ from werkzeug.test import Client -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import APITestCase @@ -26,16 +27,16 @@ def test_send_forgot_req(self): "Content-Type": "application/json"}) self.assertTrue(res.json['success']) - + def test_submit_key_for_forgot_pass(self): """ Submits the key from the forgot password email to initiate password reset """ - + res = self.c.put(f'/user/forgot-password/', - json={ - "verification_key": self.key, - "password": "new_updated_pass"}, - headers={ - "Content-Type": "application/json"}) + json={ + "verification_key": self.key, + "password": "new_updated_pass"}, + headers={ + "Content-Type": "application/json"}) new_login = User.api_login("test", "new_updated_pass") self.assertTrue(res.json['success']) @@ -45,11 +46,11 @@ def test_submit_wrong_key_for_forgot_pass(self): """ Submit a wrong key for verification of reset password. 
Should return `Not Found` """ key = 'wrong-key' res = self.c.put(f'/user/forgot-password/', - json={ - "verification_key": key, - "password": "forgotten_password"}, - headers={ - "Content-Type": "application/json"}) + json={ + "verification_key": key, + "password": "forgotten_password"}, + headers={ + "Content-Type": "application/json"}) expected = {'data': 'Not Found', 'success': False} self.assertFalse(res.json['success']) diff --git a/pfunk/tests/test_web_login.py b/pfunk/tests/test_web_login.py index b08cc27..4a895c6 100644 --- a/pfunk/tests/test_web_login.py +++ b/pfunk/tests/test_web_login.py @@ -1,6 +1,7 @@ from werkzeug.test import Client -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase @@ -24,6 +25,7 @@ def test_login(self): # check if response has cookies self.assertIsNotNone(res.headers['Set-Cookie']) + self.assertTrue(res.json['success']) def test_wrong_login(self): @@ -36,11 +38,11 @@ def test_wrong_login(self): def test_logout(self): """ Tests `pfunk.contrib.auth.views.LogoutView` invalidate token login and remove cookie """ token, exp = User.api_login("test", "abc123") + res = self.c.post('/user/logout/', headers={ "Authorization": token, "Content-Type": "application/json" }) - self.assertTrue(res.json['success']) def test_wrong_logout(self): diff --git a/pfunk/tests/test_web_signup.py b/pfunk/tests/test_web_signup.py index f1c5fa4..c651084 100644 --- a/pfunk/tests/test_web_signup.py +++ b/pfunk/tests/test_web_signup.py @@ -1,6 +1,7 @@ from werkzeug.test import Client -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import APITestCase @@ -27,7 +28,7 @@ def test_signup(self): "email": "testemail@email.com", "first_name": "Forest", "last_name": "Gump", - "_credential_field": "password" + "_credential_field": "password" }) # token = User.login(username="new_user", password="password") @@ -41,7 +42,7 @@ def test_signup_not_unique(self): "email": "testemail@email.com", "first_name": "Forest", "last_name": "Gump", - "_credential_field": "password" + "_credential_field": "password" }) self.assertFalse(res.json['success']) diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index ab20d42..af1fe1a 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -1,8 +1,8 @@ from werkzeug.test import Client -from pfunk.tests import User, Group -from pfunk.contrib.auth.collections import PermissionGroup -from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.ecommerce.collections import StripePackage from pfunk.testcase import APITestCase @@ -57,30 +57,24 @@ def test_create_package(self): "Content-Type": "application/json" }) - - # TODO: Fix `forbidden` error in stripe views def test_update_package(self): res = self.c.put(f'/stripepackage/update/{self.stripe_pkg.ref.id()}/', - json={ - 'stripe_id': '123', - 'name': 'stripe_pkg', - 'price': 10.10, - 'description': 'a test package' - }, - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) - - + json={ + 'stripe_id': '123', + 'name': 'stripe_pkg', + 'price': 10.10, + 'description': 'a test package' + }, + headers={ + 
"Authorization": self.token, + "Content-Type": "application/json" + }) # TODO: Fix `forbidden` error in stripe views def test_delete_package(self): res = self.c.delete(f'/stripepackage/delete/{self.stripe_pkg.ref.id()}/', - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) - - \ No newline at end of file + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) diff --git a/pfunk/utils/deploy.py b/pfunk/utils/deploy.py index f07ec12..a78cae2 100644 --- a/pfunk/utils/deploy.py +++ b/pfunk/utils/deploy.py @@ -1,9 +1,10 @@ -import boto3 import datetime import json import os -import pip import shutil + +import boto3 +import pip import sammy as sm s3 = boto3.client('s3') diff --git a/pfunk/utils/json_utils.py b/pfunk/utils/json_utils.py index ee7342f..15de3a0 100644 --- a/pfunk/utils/json_utils.py +++ b/pfunk/utils/json_utils.py @@ -20,4 +20,4 @@ def default(self, obj): try: return super(PFunkEncoder, self).default(obj) except AttributeError: - return str(obj) \ No newline at end of file + return str(obj) diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py index 4d08373..60633bd 100644 --- a/pfunk/utils/publishing.py +++ b/pfunk/utils/publishing.py @@ -7,6 +7,7 @@ class BearerAuth(requests.auth.AuthBase): """ Bearer Token Auth class for the requests library. """ + def __init__(self, token): """ @@ -19,7 +20,8 @@ def __call__(self, r): r.headers["authorization"] = "Bearer " + self.token return r -def create_or_update_role(client, payload:dict={}): + +def create_or_update_role(client, payload: dict = {}): """ Utility that attempts to create a role and if that fails it attempts to update it. Args: @@ -96,4 +98,4 @@ def create_or_update_function(client, payload): ) ) - return response \ No newline at end of file + return response diff --git a/pfunk/web/request.py b/pfunk/web/request.py index c19a0b6..d794c88 100644 --- a/pfunk/web/request.py +++ b/pfunk/web/request.py @@ -20,7 +20,7 @@ def __init__(self, event, kwargs): self.user = None self.token: str = None self.jwt: str = None - + def get_cookies(self, raw_cookies): """ Returns dict of cookies @@ -59,6 +59,7 @@ class WSGIRequest(Request): """ WSGI Request """ + def __init__(self, event, kwargs=None): super(WSGIRequest, self).__init__(event, kwargs=kwargs) self.method = event.method @@ -98,6 +99,7 @@ class HTTPRequest(BaseAPIGatewayRequest): """ HTTP Request: For HTTP API Gateway """ + def __init__(self, event, kwargs=None): super(HTTPRequest, self).__init__(event, kwargs=kwargs) self.raw_event = event @@ -114,5 +116,3 @@ def __init__(self, event, kwargs=None): def get_cookies(self, raw_cookies): return parse_cookie(';'.join(raw_cookies)) - - diff --git a/pfunk/web/response.py b/pfunk/web/response.py index 0feef25..b81e471 100644 --- a/pfunk/web/response.py +++ b/pfunk/web/response.py @@ -33,7 +33,7 @@ def response(self): 'statusCode': self.status_code, 'body': self.body, 'headers': self.headers - } + } class NotFoundResponseMixin(object): @@ -122,4 +122,4 @@ class HttpBadRequestResponse(BadRequestResponseMixin, Response): class JSONBadRequestResponse(BadRequestResponseMixin, JSONResponse): - pass \ No newline at end of file + pass diff --git a/pfunk/web/views/base.py b/pfunk/web/views/base.py index 375e286..c0f4503 100644 --- a/pfunk/web/views/base.py +++ b/pfunk/web/views/base.py @@ -1,7 +1,8 @@ from envs import env -from faunadb.errors import NotFound as FaunaNotFound, PermissionDenied, BadRequest, ErrorData +from faunadb.errors import NotFound as FaunaNotFound, 
PermissionDenied, BadRequest from jwt import InvalidSignatureError from valley.exceptions import ValidationException +from valley.utils import import_util from werkzeug.exceptions import NotFound, MethodNotAllowed from werkzeug.http import dump_cookie from werkzeug.routing import Rule @@ -358,6 +359,8 @@ def get_query_kwargs(self): for k, v in fields.items(): current_value = data.get(k) col = v.get('foreign_class') + if isinstance(col, str): + col = import_util(col) if current_value: obj = col.get(current_value) data[k] = obj diff --git a/pfunk/web/views/graphql.py b/pfunk/web/views/graphql.py index cbd6065..f0842d7 100644 --- a/pfunk/web/views/graphql.py +++ b/pfunk/web/views/graphql.py @@ -1,13 +1,13 @@ import requests from envs import env +from graphql.exceptions import SyntaxError as GQLSyntaxError +from graphql.parser import GraphQLParser from werkzeug.routing import Rule from pfunk.exceptions import GraphQLError from pfunk.utils.publishing import BearerAuth from pfunk.web.response import GraphQLResponse from pfunk.web.views.json import JSONView -from graphql.parser import GraphQLParser -from graphql.exceptions import SyntaxError as GQLSyntaxError parser = GraphQLParser() @@ -55,12 +55,12 @@ class GraphQLView(JSONView): def get_query(self): gql = self.process_graphql() resp = requests.request( - method='post', - url=env('FAUNA_GRAPHQL_URL', 'https://graphql.fauna.com/graphql'), - json=self.request.get_json(), - auth=BearerAuth(self.request.token), - allow_redirects=False - ) + method='post', + url=env('FAUNA_GRAPHQL_URL', 'https://graphql.fauna.com/graphql'), + json=self.request.get_json(), + auth=BearerAuth(self.request.token), + allow_redirects=False + ) return resp.json() def process_graphql(self): @@ -76,4 +76,4 @@ def process_graphql(self): @classmethod def url(cls, collection=None): return Rule(f'/graphql/', endpoint=cls.as_view(), - methods=cls.http_methods) \ No newline at end of file + methods=cls.http_methods) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 1459001..b83958e 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -1,6 +1,6 @@ +from pfunk.client import q from pfunk.web.response import JSONResponse, JSONNotFoundResponse, JSONBadRequestResponse, \ JSONMethodNotAllowedResponse, JSONUnauthorizedResponse, JSONForbiddenResponse -from pfunk.client import q from pfunk.web.views.base import ActionMixin, HTTPView, IDMixin, ObjectMixin, QuerysetMixin, UpdateMixin @@ -99,4 +99,4 @@ class ListView(QuerysetMixin, ActionMixin, JSONView): class GraphQLView(HTTPView): - pass \ No newline at end of file + pass diff --git a/poetry.lock b/poetry.lock index 619a60b..144a0c1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,6 +1,6 @@ [[package]] name = "appnope" -version = "0.1.2" +version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" category = "dev" optional = false @@ -71,6 +71,21 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "beautifulsoup4" +version = "4.11.1" +description = "Screen-scraping library" +category = "dev" +optional = false +python-versions = ">=3.6.0" + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + [[package]] name = "bleach" version = "4.1.0" @@ -86,15 +101,15 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.20.46" +version = "1.23.1" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.23.46,<1.24.0" -jmespath = 
">=0.7.1,<1.0.0" +botocore = ">=1.26.1,<1.27.0" +jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.5.0,<0.6.0" [package.extras] @@ -102,19 +117,19 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.23.46" +version = "1.26.1" description = "Low-level, data-driven core of boto 3." category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -jmespath = ">=0.7.1,<1.0.0" +jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.12.5)"] +crt = ["awscrt (==0.13.8)"] [[package]] name = "cachetools" @@ -145,7 +160,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.10" +version = "2.0.12" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -156,11 +171,11 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.3" +version = "8.1.3" description = "Composable command line interface toolkit" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -206,11 +221,11 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "debugpy" -version = "1.5.1" +version = "1.6.0" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +python-versions = ">=3.7" [[package]] name = "decorator" @@ -230,11 +245,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "entrypoints" -version = "0.3" +version = "0.4" description = "Discover and load entry points from installed packages." 
category = "dev" optional = false -python-versions = ">=2.7" +python-versions = ">=3.6" [[package]] name = "envs" @@ -247,9 +262,20 @@ python-versions = ">=3.6,<4.0" [package.extras] cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] +[[package]] +name = "fastjsonschema" +version = "2.15.3" +description = "Fastest Python implementation of JSON schema" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + [[package]] name = "faunadb" -version = "4.1.1" +version = "4.2.0" description = "FaunaDB Python driver" category = "main" optional = false @@ -337,7 +363,7 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.10.1" +version = "4.11.3" description = "Read metadata from Python packages" category = "main" optional = false @@ -348,28 +374,28 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" -version = "5.4.0" +version = "5.7.1" description = "Read resources from Python packages" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [[package]] name = "ipykernel" -version = "6.7.0" +version = "6.13.0" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -377,20 +403,22 @@ python-versions = ">=3.7" [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} -debugpy = ">=1.0.0,<2.0" +debugpy = ">=1.0" ipython = ">=7.23.1" -jupyter-client = "<8.0" -matplotlib-inline = ">=0.1.0,<0.2.0" +jupyter-client = ">=6.1.12" +matplotlib-inline = ">=0.1" nest-asyncio = "*" -tornado = ">=4.2,<7.0" -traitlets = ">=5.1.0,<6.0" +packaging = "*" +psutil = "*" +tornado = ">=6.1" +traitlets = ">=5.1.0" [package.extras] -test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "ipyparallel"] +test = ["pytest (>=6.0)", "pytest-cov", "flaky", "ipyparallel", "pre-commit", "pytest-timeout"] [[package]] name = "ipython" -version = "7.31.1" +version = "7.33.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -430,7 +458,7 @@ python-versions = 
"*" [[package]] name = "ipywidgets" -version = "7.6.5" +version = "7.7.0" description = "IPython HTML widgets for Jupyter" category = "dev" optional = false @@ -443,7 +471,7 @@ ipython-genutils = ">=0.2.0,<0.3.0" jupyterlab-widgets = {version = ">=1.0.0", markers = "python_version >= \"3.6\""} nbformat = ">=4.2.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=3.5.0,<3.6.0" +widgetsnbextension = ">=3.6.0,<3.7.0" [package.extras] test = ["pytest (>=3.6.0)", "pytest-cov", "mock"] @@ -487,15 +515,15 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jmespath" -version = "0.10.0" +version = "1.0.0" description = "JSON Matching Expressions" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.7" [[package]] name = "jsonschema" -version = "4.4.0" +version = "4.5.1" description = "An implementation of JSON Schema validation for Python" category = "dev" optional = false @@ -530,28 +558,28 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "7.1.2" +version = "7.3.1" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] entrypoints = "*" -jupyter-core = ">=4.6.0" -nest-asyncio = ">=1.5" -python-dateutil = ">=2.1" -pyzmq = ">=13" -tornado = ">=4.1" +jupyter-core = ">=4.9.2" +nest-asyncio = ">=1.5.4" +python-dateutil = ">=2.8.2" +pyzmq = ">=22.3" +tornado = ">=6.0" traitlets = "*" [package.extras] -doc = ["myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -test = ["codecov", "coverage", "ipykernel", "ipython", "mock", "mypy", "pre-commit", "pytest", "pytest-asyncio", "pytest-cov", "pytest-timeout", "jedi (<0.18)"] +doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +test = ["codecov", "coverage", "ipykernel (>=6.5)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-console" -version = "6.4.0" +version = "6.4.3" description = "Jupyter terminal console" category = "dev" optional = false @@ -560,7 +588,7 @@ python-versions = ">=3.6" [package.dependencies] ipykernel = "*" ipython = "*" -jupyter-client = "*" +jupyter-client = ">=7.0.0" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = "*" @@ -569,30 +597,30 @@ test = ["pexpect"] [[package]] name = "jupyter-core" -version = "4.9.1" +version = "4.10.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = "*" +[package.extras] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + [[package]] name = "jupyterlab-pygments" -version = "0.1.2" +version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -pygments = ">=2.4.1,<3" +python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "1.0.2" +version = "1.1.0" description = "A JupyterLab extension." category = "dev" optional = false @@ -600,11 +628,11 @@ python-versions = ">=3.6" [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "matplotlib-inline" @@ -627,7 +655,7 @@ python-versions = "*" [[package]] name = "nbclient" -version = "0.5.10" +version = "0.6.3" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "dev" optional = false @@ -637,63 +665,65 @@ python-versions = ">=3.7.0" jupyter-client = ">=6.1.5" nbformat = ">=5.0" nest-asyncio = "*" -traitlets = ">=4.2" +traitlets = ">=5.0.0" [package.extras] -sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] -test = ["ipython", "ipykernel", "ipywidgets (<8.0.0)", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "xmltodict", "black", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)"] +sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] +test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] [[package]] name = "nbconvert" -version = "6.4.1" +version = "6.5.0" description = "Converting Jupyter Notebooks" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] +beautifulsoup4 = "*" bleach = "*" defusedxml = "*" entrypoints = ">=0.2.2" -jinja2 = ">=2.4" -jupyter-core = "*" +jinja2 = ">=3.0" +jupyter-core = ">=4.7" jupyterlab-pygments = "*" +MarkupSafe = ">=2.0" mistune = ">=0.8.1,<2" -nbclient = ">=0.5.0,<0.6.0" -nbformat = ">=4.4" +nbclient = ">=0.5.0" +nbformat = ">=5.1" +packaging = "*" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" -testpath = "*" +tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.6)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "tornado (>=6.1)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] -serve = ["tornado (>=4.0)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.6)"] -webpdf = ["pyppeteer (==0.2.6)"] +serve = ["tornado (>=6.1)"] +test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)"] +webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.1.3" +version = "5.4.0" description = "The Jupyter Notebook format" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] -ipython-genutils = "*" -jsonschema = ">=2.4,<2.5.0 || >2.5.0" +fastjsonschema = "*" +jsonschema = ">=2.6" jupyter-core = "*" -traitlets = ">=4.1" +traitlets = ">=5.1" [package.extras] -fast = ["fastjsonschema"] -test = ["check-manifest", "fastjsonschema", "testpath", "pytest", "pytest-cov"] +test = ["check-manifest", "testpath", "pytest", "pre-commit"] [[package]] name = "nest-asyncio" -version = "1.5.4" +version = "1.5.5" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false @@ -701,11 +731,11 @@ python-versions = ">=3.5" [[package]] name = 
"notebook" -version = "6.4.10" +version = "6.4.11" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] argon2-cffi = "*" @@ -727,7 +757,7 @@ traitlets = ">=4.2.1" [package.extras] docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] [[package]] name = "packaging" @@ -806,7 +836,7 @@ python-versions = "*" [[package]] name = "prometheus-client" -version = "0.13.1" +version = "0.14.1" description = "Python client for the Prometheus monitoring system." category = "dev" optional = false @@ -817,7 +847,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.26" +version = "3.0.29" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false @@ -826,6 +856,17 @@ python-versions = ">=3.6.2" [package.dependencies] wcwidth = "*" +[[package]] +name = "psutil" +version = "5.9.0" +description = "Cross-platform lib for process and system monitoring in Python." +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] + [[package]] name = "ptyprocess" version = "0.7.0" @@ -852,15 +893,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.11.2" +version = "2.12.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "pyjwt" -version = "2.3.0" +version = "2.4.0" description = "JSON Web Token implementation in Python" category = "main" optional = false @@ -874,14 +915,14 @@ tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pyrsistent" @@ -912,7 +953,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "303" +version = "304" description = "Python for Window Extensions" category = "dev" optional = false @@ -920,11 +961,11 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "2.0.1" +version = "2.0.5" description = "Pseudo terminal support for Windows from Python." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "pyyaml" @@ -948,11 +989,11 @@ py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.2.2" +version = "5.3.0" description = "Jupyter Qt console" category = "dev" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] ipykernel = ">=4.1" @@ -961,7 +1002,7 @@ jupyter-client = ">=4.1" jupyter-core = "*" pygments = "*" pyzmq = ">=17.1" -qtpy = "*" +qtpy = ">=2.0.1" traitlets = "*" [package.extras] @@ -970,17 +1011,17 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "2.0.0" +version = "2.1.0" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] packaging = "*" [package.extras] -test = ["pytest (>=6.0.0,<7.0)", "pytest-cov (>=2.11.0)"] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "requests" @@ -1002,7 +1043,7 @@ use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "s3transfer" -version = "0.5.0" +version = "0.5.2" description = "An Amazon S3 Transfer Manager" category = "main" optional = false @@ -1048,9 +1089,17 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "soupsieve" +version = "2.3.2.post1" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "stripe" -version = "2.65.0" +version = "2.76.0" description = "Python bindings for the Stripe API" category = "main" optional = false @@ -1061,7 +1110,7 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "terminado" -version = "0.13.1" +version = "0.15.0" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
category = "dev" optional = false @@ -1070,21 +1119,25 @@ python-versions = ">=3.7" [package.dependencies] ptyprocess = {version = "*", markers = "os_name != \"nt\""} pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=4" +tornado = ">=6.1.0" [package.extras] -test = ["pytest"] +test = ["pre-commit", "pytest-timeout", "pytest (>=6.0)"] [[package]] -name = "testpath" -version = "0.5.0" -description = "Test utilities for code working with files and commands" +name = "tinycss2" +version = "1.1.1" +description = "A tiny CSS parser" category = "dev" optional = false -python-versions = ">= 3.5" +python-versions = ">=3.6" + +[package.dependencies] +webencodings = ">=0.4" [package.extras] -test = ["pytest", "pathlib2"] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] [[package]] name = "tornado" @@ -1096,33 +1149,33 @@ python-versions = ">= 3.5" [[package]] name = "traitlets" -version = "5.1.1" -description = "Traitlets Python configuration system" +version = "5.2.1.post0" +description = "" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -test = ["pytest"] +test = ["pre-commit", "pytest"] [[package]] name = "typing-extensions" -version = "4.0.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.8" +version = "1.26.9" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] @@ -1155,18 +1208,18 @@ python-versions = "*" [[package]] name = "werkzeug" -version = "2.0.2" +version = "2.1.2" description = "The comprehensive WSGI web application library." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "3.5.2" +version = "3.6.0" description = "IPython HTML widgets for Jupyter" category = "dev" optional = false @@ -1177,15 +1230,15 @@ notebook = ">=4.4.1" [[package]] name = "zipp" -version = "3.7.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" @@ -1194,8 +1247,8 @@ content-hash = "4e8046eb9b632ff1bbcc07c5141f30c51bc7d7ff11b8a22cc5a38b250d46afcd [metadata.files] appnope = [ - {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, - {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] argon2-cffi = [ {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, @@ -1236,17 +1289,21 @@ backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] bleach = [ {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, ] boto3 = [ - {file = "boto3-1.20.46-py3-none-any.whl", hash = "sha256:a2ffce001160d7e7c72a90c3084700d50eb64ea4a3aae8afe21566971d1fd611"}, - {file = "boto3-1.20.46.tar.gz", hash = "sha256:d7effba509d7298ef49316ba2da7a2ea115f2a7ff691f875f6354666663cf386"}, + {file = "boto3-1.23.1-py3-none-any.whl", hash = "sha256:4e3ef99d211266175a97b35d78103c31e3d01af31fd02bf599185421e5873fc0"}, + {file = "boto3-1.23.1.tar.gz", hash = "sha256:3b50b49c5c0d3f19406cfbcefa32467c199cd6537d80f6fd04f18588670bdeeb"}, ] botocore = [ - {file = "botocore-1.23.46-py3-none-any.whl", hash = "sha256:354bce55e5adc8e2fe106acfd455ce448f9b920d7b697d06faa8cf200fd6566b"}, - {file = "botocore-1.23.46.tar.gz", hash = "sha256:38dd4564839f531725b667db360ba7df2125ceb3752b0ba12759c3e918015b95"}, + {file = "botocore-1.26.1-py3-none-any.whl", hash = 
"sha256:598304f20df607944c6db3870e005f2775230ec4558c8280d870f861a8003632"}, + {file = "botocore-1.26.1.tar.gz", hash = "sha256:a805cbd8e79b64da0f719869b6b4c698cf5db7991b1aa412b086b25fb2892795"}, ] cachetools = [ {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, @@ -1309,12 +1366,12 @@ cffi = [ {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.10.tar.gz", hash = "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"}, - {file = "charset_normalizer-2.0.10-py3-none-any.whl", hash = "sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455"}, + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, - {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -1396,27 +1453,24 @@ cryptography = [ {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, ] debugpy = [ - {file = "debugpy-1.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:70b422c63a833630c33e3f9cdbd9b6971f8c5afd452697e464339a21bbe862ba"}, - {file = "debugpy-1.5.1-cp310-cp310-win32.whl", hash = "sha256:3a457ad9c0059a21a6c7d563c1f18e924f5cf90278c722bd50ede6f56b77c7fe"}, - {file = "debugpy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:5d76a4fd028d8009c3faf1185b4b78ceb2273dd2499447664b03939e0368bb90"}, - {file = "debugpy-1.5.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:16db27b4b91991442f91d73604d32080b30de655aca9ba821b1972ea8171021b"}, - {file = "debugpy-1.5.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b073ad5e8d8c488fbb6a116986858bab0c9c4558f28deb8832c7a5a27405bd6"}, - {file = "debugpy-1.5.1-cp36-cp36m-win32.whl", hash = "sha256:318f81f37341e4e054b4267d39896b73cddb3612ca13b39d7eea45af65165e1d"}, - {file = "debugpy-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b5b3157372e0e0a1297a8b6b5280bcf1d35a40f436c7973771c972726d1e32d5"}, - {file = "debugpy-1.5.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1ec3a086e14bba6c472632025b8fe5bdfbaef2afa1ebd5c6615ce6ed8d89bc67"}, - {file = "debugpy-1.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:26fbe53cca45a608679094791ce587b6e2798acd1d4777a8b303b07622e85182"}, - {file = "debugpy-1.5.1-cp37-cp37m-win32.whl", hash = "sha256:d876db8c312eeb02d85611e0f696abe66a2c1515e6405943609e725d5ff36f2a"}, - {file = "debugpy-1.5.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4404a62fb5332ea5c8c9132290eef50b3a0ba38cecacad5529e969a783bcbdd7"}, - {file = "debugpy-1.5.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f3a3dca9104aa14fd4210edcce6d9ce2b65bd9618c0b222135a40b9d6e2a9eeb"}, - {file = "debugpy-1.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2df2c373e85871086bd55271c929670cd4e1dba63e94a08d442db830646203b"}, - {file = "debugpy-1.5.1-cp38-cp38-win32.whl", hash = "sha256:82f5f9ce93af6861a0713f804e62ab390bb12a17f113153e47fea8bbb1dfbe36"}, - {file = "debugpy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:17a25ce9d7714f92fc97ef00cc06269d7c2b163094990ada30156ed31d9a5030"}, - {file = "debugpy-1.5.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:01e98c594b3e66d529e40edf314f849cd1a21f7a013298df58cd8e263bf8e184"}, - {file = "debugpy-1.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f73988422b17f071ad3c4383551ace1ba5ed810cbab5f9c362783d22d40a08dc"}, - {file = "debugpy-1.5.1-cp39-cp39-win32.whl", hash = "sha256:23df67fc56d59e386c342428a7953c2c06cc226d8525b11319153e96afb65b0c"}, - {file = "debugpy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:a2aa64f6d2ca7ded8a7e8a4e7cae3bc71866b09876b7b05cecad231779cb9156"}, - {file = "debugpy-1.5.1-py2.py3-none-any.whl", hash = "sha256:194f95dd3e84568b5489aab5689a3a2c044e8fdc06f1890b8b4f70b6b89f2778"}, - {file = "debugpy-1.5.1.zip", hash = "sha256:d2b09e91fbd1efa4f4fda121d49af89501beda50c18ed7499712c71a4bf3452e"}, + {file = "debugpy-1.6.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:eb1946efac0c0c3d411cea0b5ac772fbde744109fd9520fb0c5a51979faf05ad"}, + {file = "debugpy-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e3513399177dd37af4c1332df52da5da1d0c387e5927dc4c0709e26ee7302e8f"}, + {file = "debugpy-1.6.0-cp310-cp310-win32.whl", hash = "sha256:5c492235d6b68f879df3bdbdb01f25c15be15682665517c2c7d0420e5658d71f"}, + {file = "debugpy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:40de9ba137d355538432209d05e0f5fe5d0498dce761c39119ad4b950b51db31"}, + {file = "debugpy-1.6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0d383b91efee57dbb923ba20801130cf60450a0eda60bce25bccd937de8e323a"}, + {file = "debugpy-1.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ff853e60e77e1c16f85a31adb8360bb2d98ca588d7ed645b7f0985b240bdb5e"}, + {file = "debugpy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:8e972c717d95f56b6a3a7a29a5ede1ee8f2c3802f6f0e678203b0778eb322bf1"}, + {file = "debugpy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a8aaeb53e87225141fda7b9081bd87155c1debc13e2f5a532d341112d1983b65"}, + {file = "debugpy-1.6.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:132defb585b518955358321d0f42f6aa815aa15b432be27db654807707c70b2f"}, + {file = "debugpy-1.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ee75844242b4537beb5899f3e60a578454d1f136b99e8d57ac424573797b94a"}, + {file = "debugpy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:a65a2499761d47df3e9ea9567109be6e73d412e00ac3ffcf74839f3ddfcdf028"}, + {file = "debugpy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:bd980d533d0ddfc451e03a3bb32acb2900049fec39afc3425b944ebf0889be62"}, + {file = "debugpy-1.6.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:245c7789a012f86210847ec7ee9f38c30a30d4c2223c3e111829a76c9006a5d0"}, + {file 
= "debugpy-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e3aa2368883e83e7b689ddff3cafb595f7b711f6a065886b46a96a7fef874e7"}, + {file = "debugpy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:72bcfa97f3afa0064afc77ab811f48ad4a06ac330f290b675082c24437730366"}, + {file = "debugpy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:30abefefd2ff5a5481162d613cb70e60e2fa80a5eb4c994717c0f008ed25d2e1"}, + {file = "debugpy-1.6.0-py2.py3-none-any.whl", hash = "sha256:4de7777842da7e08652f2776c552070bbdd758557fdec73a15d7be0e4aab95ce"}, + {file = "debugpy-1.6.0.zip", hash = "sha256:7b79c40852991f7b6c3ea65845ed0f5f6b731c37f4f9ad9c61e2ab4bd48a9275"}, ] decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, @@ -1427,15 +1481,19 @@ defusedxml = [ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] entrypoints = [ - {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, - {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, ] envs = [ {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, ] +fastjsonschema = [ + {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, + {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, +] faunadb = [ - {file = "faunadb-4.1.1-py2.py3-none-any.whl", hash = "sha256:e197d356b783dfade08a1ffa7a4b32f2156c165c44d4e29b6605a97d38dfca02"}, + {file = "faunadb-4.2.0-py2.py3-none-any.whl", hash = "sha256:73d5f560bddb7fc45f9201d526f97fcd0a7b0ef74ce3d4d46b8e116f4ce6e219"}, ] future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, @@ -1464,28 +1522,28 @@ idna = [ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, - {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, + {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, + {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, ] importlib-resources = [ - {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, - {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, + {file = "importlib_resources-5.7.1-py3-none-any.whl", hash = 
"sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"}, + {file = "importlib_resources-5.7.1.tar.gz", hash = "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3"}, ] ipykernel = [ - {file = "ipykernel-6.7.0-py3-none-any.whl", hash = "sha256:6203ccd5510ff148e9433fd4a2707c5ce8d688f026427f46e13d7ebf9b3e9787"}, - {file = "ipykernel-6.7.0.tar.gz", hash = "sha256:d82b904fdc2fd8c7b1fbe0fa481c68a11b4cd4c8ef07e6517da1f10cc3114d24"}, + {file = "ipykernel-6.13.0-py3-none-any.whl", hash = "sha256:2b0987af43c0d4b62cecb13c592755f599f96f29aafe36c01731aaa96df30d39"}, + {file = "ipykernel-6.13.0.tar.gz", hash = "sha256:0e28273e290858393e86e152b104e5506a79c13d25b951ac6eca220051b4be60"}, ] ipython = [ - {file = "ipython-7.31.1-py3-none-any.whl", hash = "sha256:55df3e0bd0f94e715abd968bedd89d4e8a7bce4bf498fb123fed4f5398fea874"}, - {file = "ipython-7.31.1.tar.gz", hash = "sha256:b5548ec5329a4bcf054a5deed5099b0f9622eb9ea51aaa7104d215fece201d8c"}, + {file = "ipython-7.33.0-py3-none-any.whl", hash = "sha256:916a3126896e4fd78dd4d9cf3e21586e7fd93bae3f1cd751588b75524b64bf94"}, + {file = "ipython-7.33.0.tar.gz", hash = "sha256:bcffb865a83b081620301ba0ec4d95084454f26b91d6d66b475bff3dfb0218d4"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] ipywidgets = [ - {file = "ipywidgets-7.6.5-py2.py3-none-any.whl", hash = "sha256:d258f582f915c62ea91023299603be095de19afb5ee271698f88327b9fe9bf43"}, - {file = "ipywidgets-7.6.5.tar.gz", hash = "sha256:00974f7cb4d5f8d494c19810fedb9fa9b64bffd3cda7c2be23c133a1ad3c99c5"}, + {file = "ipywidgets-7.7.0-py2.py3-none-any.whl", hash = "sha256:e58ff58bc94d481e91ecb6e13a5cb96a87b6b8ade135e055603d0ca24593df38"}, + {file = "ipywidgets-7.7.0.tar.gz", hash = "sha256:ab4a5596855a88b83761921c768707d65e5847068139bc1729ddfe834703542a"}, ] iso8601 = [ {file = "iso8601-1.0.2-py3-none-any.whl", hash = "sha256:d7bc01b1c2a43b259570bb307f057abc578786ea734ba2b87b836c5efc5bd443"}, @@ -1500,12 +1558,12 @@ jinja2 = [ {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, ] jmespath = [ - {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, - {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, + {file = "jmespath-1.0.0-py3-none-any.whl", hash = "sha256:e8dcd576ed616f14ec02eed0005c85973b5890083313860136657e24784e4c04"}, + {file = "jmespath-1.0.0.tar.gz", hash = "sha256:a490e280edd1f57d6de88636992d05b71e97d69a26a19f058ecf7d304474bf5e"}, ] jsonschema = [ - {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, - {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, + {file = "jsonschema-4.5.1-py3-none-any.whl", hash = "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f"}, + {file = "jsonschema-4.5.1.tar.gz", hash = "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc"}, ] jupyter = [ {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, @@ -1513,95 +1571,66 @@ jupyter = [ {file = 
"jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, ] jupyter-client = [ - {file = "jupyter_client-7.1.2-py3-none-any.whl", hash = "sha256:d56f1c57bef42ff31e61b1185d3348a5b2bcde7c9a05523ae4dbe5ee0871797c"}, - {file = "jupyter_client-7.1.2.tar.gz", hash = "sha256:4ea61033726c8e579edb55626d8ee2e6bf0a83158ddf3751b8dd46b2c5cd1e96"}, + {file = "jupyter_client-7.3.1-py3-none-any.whl", hash = "sha256:404abe552540aff3527e66e16beb114b6b4ff58479d51a301f4eb9701e4f52ef"}, + {file = "jupyter_client-7.3.1.tar.gz", hash = "sha256:05d4ff6a0ade25138c6bb0fbeac7ddc26b5fe835e7dd816b64b4a45b931bdc0b"}, ] jupyter-console = [ - {file = "jupyter_console-6.4.0-py3-none-any.whl", hash = "sha256:7799c4ea951e0e96ba8260575423cb323ea5a03fcf5503560fa3e15748869e27"}, - {file = "jupyter_console-6.4.0.tar.gz", hash = "sha256:242248e1685039cd8bff2c2ecb7ce6c1546eb50ee3b08519729e6e881aec19c7"}, + {file = "jupyter_console-6.4.3-py3-none-any.whl", hash = "sha256:e630bcb682c0088dda45688ad7c2424d4a825c8acf494cb036ced03ed0424841"}, + {file = "jupyter_console-6.4.3.tar.gz", hash = "sha256:55f32626b0be647a85e3217ddcdb22db69efc79e8b403b9771eb9ecc696019b5"}, ] jupyter-core = [ - {file = "jupyter_core-4.9.1-py3-none-any.whl", hash = "sha256:1c091f3bbefd6f2a8782f2c1db662ca8478ac240e962ae2c66f0b87c818154ea"}, - {file = "jupyter_core-4.9.1.tar.gz", hash = "sha256:dce8a7499da5a53ae3afd5a9f4b02e5df1d57250cf48f3ad79da23b4778cd6fa"}, + {file = "jupyter_core-4.10.0-py3-none-any.whl", hash = "sha256:e7f5212177af7ab34179690140f188aa9bf3d322d8155ed972cbded19f55b6f3"}, + {file = "jupyter_core-4.10.0.tar.gz", hash = "sha256:a6de44b16b7b31d7271130c71a6792c4040f077011961138afed5e5e73181aec"}, ] jupyterlab-pygments = [ - {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, - {file = "jupyterlab_pygments-0.1.2.tar.gz", hash = "sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146"}, + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] jupyterlab-widgets = [ - {file = "jupyterlab_widgets-1.0.2-py3-none-any.whl", hash = "sha256:f5d9efface8ec62941173ba1cffb2edd0ecddc801c11ae2931e30b50492eb8f7"}, - {file = "jupyterlab_widgets-1.0.2.tar.gz", hash = "sha256:7885092b2b96bf189c3a705cc3c412a4472ec5e8382d0b47219a66cccae73cfa"}, + {file = "jupyterlab_widgets-1.1.0-py3-none-any.whl", hash = "sha256:c2a9bd3789f120f64d73268c066ed3b000c56bc1dda217be5cdc43e7b4ebad3f"}, + {file = "jupyterlab_widgets-1.1.0.tar.gz", hash = "sha256:d5f41bc1713795385f718d44dcba47e1e1473c6289f28a95aa6b2c0782ee372a"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file 
= "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = 
"MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = 
"MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash 
= "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] matplotlib-inline = [ {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, @@ -1612,24 +1641,24 @@ mistune = [ {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, ] nbclient = [ - {file = "nbclient-0.5.10-py3-none-any.whl", hash = "sha256:5b582e21c8b464e6676a9d60acc6871d7fbc3b080f74bef265a9f90411b31f6f"}, - {file = "nbclient-0.5.10.tar.gz", hash = "sha256:b5fdea88d6fa52ca38de6c2361401cfe7aaa7cd24c74effc5e489cec04d79088"}, + {file = "nbclient-0.6.3-py3-none-any.whl", hash = "sha256:2747ac9b385720d8a6c34f2f71e72cbe64aec6cadaadcc064a4df0b0e99c5874"}, + {file = "nbclient-0.6.3.tar.gz", hash = "sha256:b80726fc1fb89a0e8f8be1e77e28d0026b1e8ed90bc143c8a0c7622e4f8cdd9e"}, ] nbconvert = [ - {file = "nbconvert-6.4.1-py3-none-any.whl", hash = "sha256:fe93bc42485c54c5a49a2324c834aca1ff315f320a535bed3e3c4e085d3eebe3"}, - {file = "nbconvert-6.4.1.tar.gz", hash = "sha256:7dce3f977c2f9651841a3c49b5b7314c742f24dd118b99e51b8eec13c504f555"}, + {file = "nbconvert-6.5.0-py3-none-any.whl", hash = "sha256:c56dd0b8978a1811a5654f74c727ff16ca87dd5a43abd435a1c49b840fcd8360"}, + {file = "nbconvert-6.5.0.tar.gz", hash = "sha256:223e46e27abe8596b8aed54301fadbba433b7ffea8196a68fd7b1ff509eee99d"}, ] nbformat = [ - {file = "nbformat-5.1.3-py3-none-any.whl", hash 
= "sha256:eb8447edd7127d043361bc17f2f5a807626bc8e878c7709a1c647abda28a9171"}, - {file = "nbformat-5.1.3.tar.gz", hash = "sha256:b516788ad70771c6250977c1374fcca6edebe6126fd2adb5a69aa5c2356fd1c8"}, + {file = "nbformat-5.4.0-py3-none-any.whl", hash = "sha256:0d6072aaec95dddc39735c144ee8bbc6589c383fb462e4058abc855348152dad"}, + {file = "nbformat-5.4.0.tar.gz", hash = "sha256:44ba5ca6acb80c5d5a500f1e5b83ede8cbe364d5a495c4c8cf60aaf1ba656501"}, ] nest-asyncio = [ - {file = "nest_asyncio-1.5.4-py3-none-any.whl", hash = "sha256:3fdd0d6061a2bb16f21fe8a9c6a7945be83521d81a0d15cff52e9edee50101d6"}, - {file = "nest_asyncio-1.5.4.tar.gz", hash = "sha256:f969f6013a16fadb4adcf09d11a68a4f617c6049d7af7ac2c676110169a63abd"}, + {file = "nest_asyncio-1.5.5-py3-none-any.whl", hash = "sha256:b98e3ec1b246135e4642eceffa5a6c23a3ab12c82ff816a92c612d68205813b2"}, + {file = "nest_asyncio-1.5.5.tar.gz", hash = "sha256:e442291cd942698be619823a17a86a5759eabe1f8613084790de189fe9e16d65"}, ] notebook = [ - {file = "notebook-6.4.10-py3-none-any.whl", hash = "sha256:49cead814bff0945fcb2ee07579259418672ac175d3dc3d8102a4b0a656ed4df"}, - {file = "notebook-6.4.10.tar.gz", hash = "sha256:2408a76bc6289283a8eecfca67e298ec83c67db51a4c2e1b713dd180bb39e90e"}, + {file = "notebook-6.4.11-py3-none-any.whl", hash = "sha256:b4a6baf2eba21ce67a0ca11a793d1781b06b8078f34d06c710742e55f3eee505"}, + {file = "notebook-6.4.11.tar.gz", hash = "sha256:709b1856a564fe53054796c80e17a67262071c86bfbdfa6b96aaa346113c555a"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -1659,12 +1688,46 @@ ply = [ {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, ] prometheus-client = [ - {file = "prometheus_client-0.13.1-py3-none-any.whl", hash = "sha256:357a447fd2359b0a1d2e9b311a0c5778c330cfbe186d880ad5a6b39884652316"}, - {file = "prometheus_client-0.13.1.tar.gz", hash = "sha256:ada41b891b79fca5638bd5cfe149efa86512eaa55987893becd2c6d8d0a5dfc5"}, + {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, + {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.26-py3-none-any.whl", hash = "sha256:4bcf119be2200c17ed0d518872ef922f1de336eb6d1ddbd1e089ceb6447d97c6"}, - {file = "prompt_toolkit-3.0.26.tar.gz", hash = "sha256:a51d41a6a45fd9def54365bca8f0402c8f182f2b6f7e29c74d55faeb9fb38ac4"}, + {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, + {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, +] +psutil = [ + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, + {file = "psutil-5.9.0-cp27-none-win32.whl", hash = 
"sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, + {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, + {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, + {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, + {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, + {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, + {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, + {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, + {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, + {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, + {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, + {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, + {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, + {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, + {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, + {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, + {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, + {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, ] ptyprocess = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, @@ -1679,16 +1742,16 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pygments = [ - {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, - {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, ] pyjwt = [ - {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, - {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"}, + {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, + {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, ] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyrsistent = [ {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, @@ -1722,25 +1785,27 @@ pytz = [ {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] pywin32 = [ - {file = "pywin32-303-cp310-cp310-win32.whl", hash = "sha256:6fed4af057039f309263fd3285d7b8042d41507343cd5fa781d98fcc5b90e8bb"}, - {file = "pywin32-303-cp310-cp310-win_amd64.whl", hash = "sha256:51cb52c5ec6709f96c3f26e7795b0bf169ee0d8395b2c1d7eb2c029a5008ed51"}, - {file = "pywin32-303-cp311-cp311-win32.whl", hash = "sha256:d9b5d87ca944eb3aa4cd45516203ead4b37ab06b8b777c54aedc35975dec0dee"}, - {file = "pywin32-303-cp311-cp311-win_amd64.whl", hash = "sha256:fcf44032f5b14fcda86028cdf49b6ebdaea091230eb0a757282aa656e4732439"}, - {file = 
"pywin32-303-cp36-cp36m-win32.whl", hash = "sha256:aad484d52ec58008ca36bd4ad14a71d7dd0a99db1a4ca71072213f63bf49c7d9"}, - {file = "pywin32-303-cp36-cp36m-win_amd64.whl", hash = "sha256:2a09632916b6bb231ba49983fe989f2f625cea237219530e81a69239cd0c4559"}, - {file = "pywin32-303-cp37-cp37m-win32.whl", hash = "sha256:b1675d82bcf6dbc96363fca747bac8bff6f6e4a447a4287ac652aa4b9adc796e"}, - {file = "pywin32-303-cp37-cp37m-win_amd64.whl", hash = "sha256:c268040769b48a13367221fced6d4232ed52f044ffafeda247bd9d2c6bdc29ca"}, - {file = "pywin32-303-cp38-cp38-win32.whl", hash = "sha256:5f9ec054f5a46a0f4dfd72af2ce1372f3d5a6e4052af20b858aa7df2df7d355b"}, - {file = "pywin32-303-cp38-cp38-win_amd64.whl", hash = "sha256:793bf74fce164bcffd9d57bb13c2c15d56e43c9542a7b9687b4fccf8f8a41aba"}, - {file = "pywin32-303-cp39-cp39-win32.whl", hash = "sha256:7d3271c98434617a11921c5ccf74615794d97b079e22ed7773790822735cc352"}, - {file = "pywin32-303-cp39-cp39-win_amd64.whl", hash = "sha256:79cbb862c11b9af19bcb682891c1b91942ec2ff7de8151e2aea2e175899cda34"}, + {file = "pywin32-304-cp310-cp310-win32.whl", hash = "sha256:3c7bacf5e24298c86314f03fa20e16558a4e4138fc34615d7de4070c23e65af3"}, + {file = "pywin32-304-cp310-cp310-win_amd64.whl", hash = "sha256:4f32145913a2447736dad62495199a8e280a77a0ca662daa2332acf849f0be48"}, + {file = "pywin32-304-cp310-cp310-win_arm64.whl", hash = "sha256:d3ee45adff48e0551d1aa60d2ec066fec006083b791f5c3527c40cd8aefac71f"}, + {file = "pywin32-304-cp311-cp311-win32.whl", hash = "sha256:30c53d6ce44c12a316a06c153ea74152d3b1342610f1b99d40ba2795e5af0269"}, + {file = "pywin32-304-cp311-cp311-win_amd64.whl", hash = "sha256:7ffa0c0fa4ae4077e8b8aa73800540ef8c24530057768c3ac57c609f99a14fd4"}, + {file = "pywin32-304-cp311-cp311-win_arm64.whl", hash = "sha256:cbbe34dad39bdbaa2889a424d28752f1b4971939b14b1bb48cbf0182a3bcfc43"}, + {file = "pywin32-304-cp36-cp36m-win32.whl", hash = "sha256:be253e7b14bc601718f014d2832e4c18a5b023cbe72db826da63df76b77507a1"}, + {file = "pywin32-304-cp36-cp36m-win_amd64.whl", hash = "sha256:de9827c23321dcf43d2f288f09f3b6d772fee11e809015bdae9e69fe13213988"}, + {file = "pywin32-304-cp37-cp37m-win32.whl", hash = "sha256:f64c0377cf01b61bd5e76c25e1480ca8ab3b73f0c4add50538d332afdf8f69c5"}, + {file = "pywin32-304-cp37-cp37m-win_amd64.whl", hash = "sha256:bb2ea2aa81e96eee6a6b79d87e1d1648d3f8b87f9a64499e0b92b30d141e76df"}, + {file = "pywin32-304-cp38-cp38-win32.whl", hash = "sha256:94037b5259701988954931333aafd39cf897e990852115656b014ce72e052e96"}, + {file = "pywin32-304-cp38-cp38-win_amd64.whl", hash = "sha256:ead865a2e179b30fb717831f73cf4373401fc62fbc3455a0889a7ddac848f83e"}, + {file = "pywin32-304-cp39-cp39-win32.whl", hash = "sha256:25746d841201fd9f96b648a248f731c1dec851c9a08b8e33da8b56148e4c65cc"}, + {file = "pywin32-304-cp39-cp39-win_amd64.whl", hash = "sha256:d24a3382f013b21aa24a5cfbfad5a2cd9926610c0affde3e8ab5b3d7dbcf4ac9"}, ] pywinpty = [ - {file = "pywinpty-2.0.1-cp310-none-win_amd64.whl", hash = "sha256:ec7d4841c82980519f31d2c61b7f93db4b773a66fce489a8a72377045fe04c4b"}, - {file = "pywinpty-2.0.1-cp37-none-win_amd64.whl", hash = "sha256:29550aafda86962b3b68e3454c11e26c1b8cf646dfafec33a4325c8d70ab4f36"}, - {file = "pywinpty-2.0.1-cp38-none-win_amd64.whl", hash = "sha256:dfdbcd0407c157c2024b0ea91b855caae25510fcf6c4da21c075253f05991a3a"}, - {file = "pywinpty-2.0.1-cp39-none-win_amd64.whl", hash = "sha256:c7cd0b30da5edd3e0b967842baa2aef1d205d991aa63a13c05afdb95d0812e69"}, - {file = "pywinpty-2.0.1.tar.gz", hash = "sha256:14e7321c6d43743af0de175fca9f111c5cc8d0a9f7c608c9e1cc69ec0d6ac146"}, + 
{file = "pywinpty-2.0.5-cp310-none-win_amd64.whl", hash = "sha256:f86c76e2881c37e69678cbbf178109f8da1fa8584db24d58e1b9369b0276cfcb"}, + {file = "pywinpty-2.0.5-cp37-none-win_amd64.whl", hash = "sha256:ff9b52f182650cfdf3db1b264a6fe0963eb9d996a7a1fa843ac406c1e32111f8"}, + {file = "pywinpty-2.0.5-cp38-none-win_amd64.whl", hash = "sha256:651ee1467bd7eb6f64d44dbc954b7ab7d15ab6d8adacc4e13299692c67c5d5d2"}, + {file = "pywinpty-2.0.5-cp39-none-win_amd64.whl", hash = "sha256:e59a508ae78374febada3e53b5bbc90b5ad07ae68cbfd72a2e965f9793ae04f3"}, + {file = "pywinpty-2.0.5.tar.gz", hash = "sha256:e125d3f1804d8804952b13e33604ad2ca8b9b2cac92b27b521c005d1604794f8"}, ] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, @@ -1827,20 +1892,20 @@ pyzmq = [ {file = "pyzmq-22.3.0.tar.gz", hash = "sha256:8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c"}, ] qtconsole = [ - {file = "qtconsole-5.2.2-py3-none-any.whl", hash = "sha256:4aa6a3e600e0c8cf16853f2378311bc2371f57cb0f22ecfc28994f4cf409ee2e"}, - {file = "qtconsole-5.2.2.tar.gz", hash = "sha256:8f9db97b27782184efd0a0f2d57ea3bd852d053747a2e442a9011329c082976d"}, + {file = "qtconsole-5.3.0-py3-none-any.whl", hash = "sha256:75f2ded876444454edcb5a53262149e33b53db3a4a53116b7c3df52830905b0f"}, + {file = "qtconsole-5.3.0.tar.gz", hash = "sha256:8e3520fdc75e46abc4cc6cffeca16fa2652754109b8ae839fa28e27d1eba5625"}, ] qtpy = [ - {file = "QtPy-2.0.0-py3-none-any.whl", hash = "sha256:74bf26be3288aadc843cf3381d5ef0b82f11417ecdcbf26718a408f32590f1ac"}, - {file = "QtPy-2.0.0.tar.gz", hash = "sha256:777e333df4d711b2ec9743117ab319dadfbd743a5a0eee35923855ca3d35cd9d"}, + {file = "QtPy-2.1.0-py3-none-any.whl", hash = "sha256:aee0586081f943029312becece9f63977b0a9e3788f77a6ac8cc74802bb173d6"}, + {file = "QtPy-2.1.0.tar.gz", hash = "sha256:ca8cd4217175186344299ee4c0f7e7adcf362c70852ba35b255a534077025c06"}, ] requests = [ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] s3transfer = [ - {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, - {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, + {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, + {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, ] sammy = [ {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, @@ -1854,17 +1919,21 @@ six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +soupsieve = [ + {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, + {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, +] stripe = [ - {file = "stripe-2.65.0-py2.py3-none-any.whl", hash = 
"sha256:16a8d1dfc0ba414b24612d31ede0f57ff260bccebd6dc18e17277cb24f58c6b7"}, - {file = "stripe-2.65.0.tar.gz", hash = "sha256:2e55d4d7262085de9cef2228f14581925c35350ba58a332352b1ec9e19a7b7a6"}, + {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, + {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, ] terminado = [ - {file = "terminado-0.13.1-py3-none-any.whl", hash = "sha256:f446b522b50a7aa68b5def0a02893978fb48cb82298b0ebdae13003c6ee6f198"}, - {file = "terminado-0.13.1.tar.gz", hash = "sha256:5b82b5c6e991f0705a76f961f43262a7fb1e55b093c16dca83f16384a7f39b7b"}, + {file = "terminado-0.15.0-py3-none-any.whl", hash = "sha256:0d5f126fbfdb5887b25ae7d9d07b0d716b1cc0ccaacc71c1f3c14d228e065197"}, + {file = "terminado-0.15.0.tar.gz", hash = "sha256:ab4eeedccfcc1e6134bfee86106af90852c69d602884ea3a1e8ca6d4486e9bfe"}, ] -testpath = [ - {file = "testpath-0.5.0-py3-none-any.whl", hash = "sha256:8044f9a0bab6567fc644a3593164e872543bb44225b0e24846e2c89237937589"}, - {file = "testpath-0.5.0.tar.gz", hash = "sha256:1acf7a0bcd3004ae8357409fc33751e16d37ccc650921da1094a86581ad1e417"}, +tinycss2 = [ + {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, + {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, ] tornado = [ {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, @@ -1910,16 +1979,16 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] traitlets = [ - {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, - {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, + {file = "traitlets-5.2.1.post0-py3-none-any.whl", hash = "sha256:f44b708d33d98b0addb40c29d148a761f44af740603a8fd0e2f8b5b27cf0f087"}, + {file = "traitlets-5.2.1.post0.tar.gz", hash = "sha256:70815ecb20ec619d1af28910ade523383be13754283aef90528eb3d47b77c5db"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] urllib3 = [ - {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, - {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] valley = [ {file = "valley-1.5.6-py3-none-any.whl", hash = "sha256:fa2e5fc51d59901e5eb178116a4fb15b712928b4c87809f59cdf02a934d63cf6"}, @@ -1934,14 +2003,14 @@ webencodings = [ 
{file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] werkzeug = [ - {file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"}, - {file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"}, + {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, + {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] widgetsnbextension = [ - {file = "widgetsnbextension-3.5.2-py2.py3-none-any.whl", hash = "sha256:763a9fdc836d141fa080005a886d63f66f73d56dba1fb5961afc239c77708569"}, - {file = "widgetsnbextension-3.5.2.tar.gz", hash = "sha256:e0731a60ba540cd19bbbefe771a9076dcd2dde90713a8f87f27f53f2d1db7727"}, + {file = "widgetsnbextension-3.6.0-py2.py3-none-any.whl", hash = "sha256:4fd321cad39fdcf8a8e248a657202d42917ada8e8ed5dd3f60f073e0d54ceabd"}, + {file = "widgetsnbextension-3.6.0.tar.gz", hash = "sha256:e84a7a9fcb9baf3d57106e184a7389a8f8eb935bf741a5eb9d60aa18cc029a80"}, ] zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] From bba4f2b8c7f83a73f88dcd2bea5111b7dbeb7909 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 037/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 32 ++++++++++----------- pfunk/utils/swagger.py | 56 ++++++++++++++++++++++++++++++++----- 2 files changed, 65 insertions(+), 23 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index d96985f..dfe0290 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -8,24 +8,24 @@ class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # 
self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 6335fbd..eae74e0 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -160,6 +160,7 @@ def get_operations(self, col: Collection): An array of `Path` that can be consumed using `swaggyp.SwaggerTemplate` to show available paths + ``` """ for view in col.collection_views: route = view.url(col) @@ -188,6 +189,7 @@ def get_operations(self, col: Collection): # Skip HEAD operations continue + # Acquire path parameters of URL if args is None or len(args) == 0: # if `defaults` weren't used in URL building, use the argument defined in the URL string for converter, arguments, variable in parse_rule(rule): @@ -196,10 +198,10 @@ def get_operations(self, col: Collection): args = variable arg_type = converter - # Replace werkzeug params () to swagger-style params ({id}) - swagger_rule = self._convert_url_to_swagger(args, rule) + params = [] + # Construct path parameters for swagger generation if arg_type: - params = sw.Parameter( + path_params = sw.Parameter( name=args, _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type), _in='path', @@ -207,18 +209,55 @@ def get_operations(self, col: Collection): required=True, allowEmptyValue=False ) + params.append(path_params) + + # Acquire payload of the view from the View's docstring + # where to cut the docstrings to use the definition for the payload of the view + oas_str_split = '[OAS]\n' + view_docs = view.__doc__ + view_payload = None + if (view_docs and len(view_docs.split(oas_str_split)) > 1): + view_payload = view_docs.split(oas_str_split)[1] + + # Construct payload for swagger generation + if view_payload: + for field in json.loads(view_payload).get('data'): + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) + params.append(param) + + docs_description = view_docs if not len(view_docs.split( + oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] + consumes = ['application/json', + 'application/x-www-form-urlencoded'] + produces = ['application/json', + 'application/x-www-form-urlencoded'] + if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, + description=docs_description, responses=responses, - parameters=[params]) + consumes=consumes, + produces=produces, + parameters=params) else: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=responses) + description=docs_description, + responses=responses, + consumes=consumes, + produces=produces) + + # Replace werkzeug params () to swagger-style params ({id}) + swagger_rule = self._convert_url_to_swagger(args, rule) p = sw.Path(endpoint=swagger_rule, operations=[op]) self.paths.append(p) return self.paths @@ -240,6 +279,9 @@ def get_model_definitions(self, col: Collection): An array of `Definition` that can be consumed using `swaggyp.SwaggerTemplate` to show available models + + Payload: + """ # Define model definitions by iterating through collection's fields for its 
properties From 680299a9cc082fe644bb2feb9897786793fdef84 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 038/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 20 ++++++++------------ pfunk/web/views/json.py | 34 ++++++++++++++++++++++++++++++---- 2 files changed, 38 insertions(+), 16 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index eae74e0..e952883 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -211,17 +212,13 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's docstring - # where to cut the docstrings to use the definition for the payload of the view - oas_str_split = '[OAS]\n' - view_docs = view.__doc__ - view_payload = None - if (view_docs and len(view_docs.split(oas_str_split)) > 1): - view_payload = view_docs.split(oas_str_split)[1] + # Acquire payload of the view from the View's `_payload_docs` + view_payload = view()._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: - for field in json.loads(view_payload).get('data'): + for field in view_payload.get('data'): param = sw.Parameter( name=field.get('name'), _type=field.get('type'), @@ -232,17 +229,16 @@ def get_operations(self, col: Collection): ) params.append(param) - docs_description = view_docs if not len(view_docs.split( - oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = ['application/json', 'application/x-www-form-urlencoded'] + view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces, @@ -251,7 +247,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index cfbe739..47e896a 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -25,6 +25,29 @@ def get_response(self): headers=self.get_headers() ) + def _payload_docs(self): + """ Used in defining payload parameters for the view. + + Should return a dict that has the fields of a swagger parameter e.g. 
+ {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + """ + return {} + class CreateView(UpdateMixin, ActionMixin, JSONView): """ Define a `Create` view that allows `creation` of an entity in the collection """ @@ -34,7 +57,8 @@ class CreateView(UpdateMixin, ActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ - obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) + obj = self.collection.create( + **self.get_query_kwargs(), _token=self.request.token) return obj def get_m2m_kwargs(self, obj): @@ -50,7 +74,8 @@ def get_m2m_kwargs(self, obj): """ data = self.request.get_json() - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + fields = self.collection.get_foreign_fields_by_type( + 'pfunk.fields.ManyToManyField') for k, v in fields.items(): current_value = data.get(k) col = v.get('foreign_class')() @@ -70,7 +95,8 @@ class UpdateView(UpdateMixin, IDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ - obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) + obj = self.collection.get(self.request.kwargs.get( + 'id'), _token=self.request.token) obj._data.update(self.get_query_kwargs()) obj.save() return obj @@ -102,4 +128,4 @@ class ListView(QuerysetMixin, ActionMixin, JSONView): class GraphQLView(HTTPView): - pass \ No newline at end of file + pass From 3843493cde25a4ba01820efaa523fba18365181e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 039/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 32 ++++++++++++++++---------------- pfunk/utils/swagger.py | 29 ++++++++++++++++++++--------- pfunk/web/views/json.py | 14 +++++++------- 3 files changed, 43 insertions(+), 32 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index dfe0290..d96985f 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -8,24 +8,24 @@ class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = 
self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index e952883..c6fe21f 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,17 +216,28 @@ def get_operations(self, col: Collection): view_payload = view()._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + if field.get('schema'): + schema = sw.SwagSchema( + ref=field.get('schema') + ) + param = sw.Parameter( + name=field.get('name'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + schema=schema + ) + else: + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) consumes = ['application/json', diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 47e896a..5d092ec 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,7 +26,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in defining payload parameters for the view. + """ Used in custom defining payload parameters for the view. Should return a dict that has the fields of a swagger parameter e.g. {"data": [ @@ -34,15 +34,15 @@ def _payload_docs(self): "name":"name", "in":"formData", "description":"name of the pet", - "required": true, + "required": True, "type": "string" }, { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" } ]} """ From 8bd2a3318e59ed147eeda1adeb58b7206379166d Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:08 +0800 Subject: [PATCH 040/214] Updated the docstrings of _payload_docs --- pfunk/utils/swagger.py | 4 +--- pfunk/web/views/json.py | 51 ++++++++++++++++++++++++++++++++++------- 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index c6fe21f..33c393b 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -219,9 +219,7 @@ def get_operations(self, col: Collection): if view_payload: for field in view_payload.get('data'): if field.get('schema'): - schema = sw.SwagSchema( - ref=field.get('schema') - ) + schema = sw.SwagSchema(ref=field.get('schema')) param = sw.Parameter( name=field.get('name'), _in=field.get('in'), diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 5d092ec..bf31dcf 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -28,15 +28,31 @@ def get_response(self): def _payload_docs(self): """ Used in custom defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter e.g. 
+ Should return a dict that has the fields of a swagger parameter. + If there is an error in the swagger, it will not be raised. + Usage of `https://editor.swagger.io` to validate is recommended + e.g. + ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": True, - "type": "string" - }, { "name": "body", "in": "body", @@ -45,6 +61,7 @@ def _payload_docs(self): "schema": "#/definitions/Person" } ]} + ``` """ return {} @@ -86,6 +103,24 @@ def get_m2m_kwargs(self, obj): ) ) + def _payload_docs(self): + return {"data": [ + { + "name": "name", + "in": "formData", + "description": "name of the pet", + "required": True, + "type": "string" + }, + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} + class UpdateView(UpdateMixin, IDMixin, JSONView): """ Define a view to allow `Update` operations """ From 74d2e64d28631a4028647e1387c27607a17b76a2 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:41 +0800 Subject: [PATCH 041/214] Updated the docstrings of _payload_docs --- pfunk/web/views/json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index bf31dcf..7d01533 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,7 +26,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view. + """ Used in custom defining payload parameters for the view in Swagger generation. Should return a dict that has the fields of a swagger parameter. If there is an error in the swagger, it will not be raised. 
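Taken together, patches 038-041 give every JSON view a `_payload_docs` hook: the swagger generator calls it, turns each returned entry into a `sw.Parameter`, and emits a `$ref` via `sw.SwagSchema` whenever a `schema` key is present. A minimal sketch of a custom view using that hook is below (the `PackageCreateView` name and its form field are illustrative assumptions, not part of pfunk):

```
from pfunk.web.views.json import CreateView


class PackageCreateView(CreateView):
    """Hypothetical create view that documents its own request payload."""

    def _payload_docs(self):
        # Each dict in 'data' becomes one sw.Parameter in the generated spec.
        # A 'schema' key makes the generator emit a $ref to a model definition
        # instead of a plain typed formData/body field.
        return {"data": [
            {
                "name": "name",
                "in": "formData",
                "description": "name of the package",
                "required": True,
                "type": "string"
            },
            {
                "name": "body",
                "in": "body",
                "description": "Collection object to add",
                "required": True,
                "schema": f"#/definitions/{self.collection.__class__.__name__}"
            }
        ]}
```

Plain entries render as individual parameters, while the `schema` entry reuses the model definitions produced by `get_model_definitions`, so the payload documented in swagger.yaml stays consistent with the collection's fields.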
From 74c05665157f3aefdc20244da420621881636a4b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 042/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - pfunk/web/views/json.py | 27 +++++++++++---------------- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 22 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 33c393b..1a27812 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 7d01533..fb8e9ae 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -104,22 +104,17 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - return {"data": [ - { - "name": "name", - "in": "formData", - "description": "name of the pet", - "required": True, - "type": "string" - }, - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" - } - ]} + # TODO: Get view's collection class name by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/Collection" + } + ]} class UpdateView(UpdateMixin, IDMixin, JSONView): diff --git a/poetry.lock b/poetry.lock index 83b3b94..5746ad8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1160,7 +1160,7 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "swaggyp" -version = "0.2.0" +version = "0.3.0" description = "Python library for generating Swagger templates based on valley" category = "main" optional = false @@ -1293,7 +1293,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" +content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" [metadata.files] appnope = [ @@ -1992,8 +1992,8 @@ stripe = [ {file = "stripe-2.70.0.tar.gz", hash = "sha256:ed8897f68e6bac3398cc998eb5634551840630d6504c0026fcfd0ad91c9a74a4"}, ] swaggyp = [ - {file = "swaggyp-0.2.0-py3-none-any.whl", hash = "sha256:030dca0f0a24469abcf7e1f047eaa01c84c206a8d891cfb1a4a24f40e2a6a146"}, - {file = "swaggyp-0.2.0.tar.gz", hash = "sha256:b39ab7f8a49fdb17af7862c5ac81b872a5f7f95c3b0989a6a285fde3f4ee4885"}, + {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, + {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, ] terminado = [ {file = "terminado-0.13.3-py3-none-any.whl", hash = "sha256:874d4ea3183536c1782d13c7c91342ef0cf4e5ee1d53633029cbc972c8760bd8"}, diff --git a/pyproject.toml b/pyproject.toml index 46062d0..5aaa2ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,8 +25,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" +swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 9f9c81824918fab35845c8b080411d2744c96fe1 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 23 Jun 2022 15:13:06 +0800 Subject: [PATCH 
043/214] Fixed calling of view's _payload_docs to pass the correct argument. Made Create and Update views have default reference to model --- pfunk/utils/swagger.py | 2 +- pfunk/web/views/json.py | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 1a27812..2820efe 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -212,7 +212,7 @@ def get_operations(self, col: Collection): params.append(path_params) # Acquire payload of the view from the View's `_payload_docs` - view_payload = view()._payload_docs() + view_payload = view(col)._payload_docs() # Construct payload for swagger generation if view_payload: diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index fb8e9ae..4f04991 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -104,7 +104,7 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # TODO: Get view's collection class name by default + # Reference the collection by default if self.collection: return {"data": [ { @@ -112,7 +112,7 @@ def _payload_docs(self): "in": "body", "description": "Collection object to add", "required": True, - "schema": f"#/definitions/Collection" + "schema": f"#/definitions/{self.collection.__class__.__name__}" } ]} @@ -131,6 +131,19 @@ def get_query(self): obj.save() return obj + def _payload_docs(self): + # Reference the collection by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/{self.collection.__class__.__name__}" + } + ]} + class DetailView(ObjectMixin, IDMixin, JSONView): """ Define a view to allow single entity operations """ From 7220df46f01c109b449e6367f0bf5eda3a3d7301 Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Tue, 26 Jul 2022 20:38:04 -0400 Subject: [PATCH 044/214] added some pep8 love and test fixes to the swagger code --- pfunk/tests/test_project.py | 4 ++++ pfunk/tests/test_web_stripe.py | 33 ++++++++++++++++----------------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 267d249..858dcc3 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -29,6 +29,10 @@ def setUp(self) -> None: def tearDown(self) -> None: os.remove("pfunk.json") + try: + os.remove('swagger.yaml') + except FileNotFoundError: + pass def test_add_resource(self): self.project.add_resource(Person) diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index d560349..23d833a 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -1,17 +1,15 @@ -import json -from lib2to3.pytree import Base import tempfile -from werkzeug.test import Client from types import SimpleNamespace from unittest import mock -from pfunk.contrib.auth.collections import PermissionGroup +from werkzeug.test import Client +from pfunk.contrib.auth.collections import PermissionGroup from pfunk.contrib.auth.collections.group import Group from pfunk.contrib.auth.collections.user import User from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer -from pfunk.testcase import APITestCase from pfunk.contrib.ecommerce.views import BaseWebhookView +from pfunk.testcase import APITestCase from pfunk.web.request import HTTPRequest @@ -25,7 +23,8 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) 
self.stripe_pkg = StripePackage.create(group=self.group, - stripe_id='100', price='10', description='unit testing...', name='unit test package') + stripe_id='100', price='10', description='unit testing...', + name='unit test package') self.stripe_cus = StripeCustomer.create( user=self.user, stripe_id='100') @@ -70,7 +69,7 @@ def test_create_package(self): self.assertTrue(res.json['success']) self.assertIn("new stripe pkg", [ - pkg.name for pkg in StripePackage.all()]) + pkg.name for pkg in StripePackage.all()]) def test_update_package(self): self.assertNotIn("updated pkg", [ @@ -122,7 +121,7 @@ def test_create_customer(self): self.assertTrue(res.json['success']) self.assertIn(stripe_id, [ - cus.stripe_id for cus in StripeCustomer.all()]) + cus.stripe_id for cus in StripeCustomer.all()]) def test_list_customers(self): res = self.c.get('/stripecustomer/list/', headers={ @@ -234,14 +233,13 @@ def test_check_ip(self): @mock.patch('boto3.client') def test_send_html_email(self, mocked): # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file - with tempfile.NamedTemporaryFile(prefix='/tmp/', suffix='.html') as tmp: - res = self.view.send_html_email( - subject='Test Subject', - from_email='unittesting@email.com', - to_email_list=['recipient@email.com'], - template_name=(tmp.name.split("/")[-1]) - ) - self.assertTrue(True) # if there are no exceptions, then it passed + res = self.view.send_html_email( + subject='Test Subject', + from_email='unittesting@email.com', + to_email_list=['recipient@email.com'], + template_name=('email/email_template.html') + ) + self.assertTrue(True) # if there are no exceptions, then it passed @mock.patch('stripe.Webhook') def test_check_signing_secret(self, mocked): @@ -275,7 +273,8 @@ def setUp(self) -> None: groups=[self.group]) self.token, self.exp = User.api_login("test", "abc123") self.stripe_pkg = StripePackage.create(group=self.group, - stripe_id='100', price='10', description='unit testing...', name='unit test package') + stripe_id='100', price='10', description='unit testing...', + name='unit test package') self.app = self.project.wsgi_app self.c = Client(self.app) From 830a5ca6af2494ad50186a1496e0aa70f5872153 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:08:35 +0800 Subject: [PATCH 045/214] Added aws.py as a skeleton for upcoming AWS features --- pfunk/utils/aws.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 pfunk/utils/aws.py diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py new file mode 100644 index 0000000..7413120 --- /dev/null +++ b/pfunk/utils/aws.py @@ -0,0 +1,36 @@ +import boto3 +import swaggyp as sw + +class ApiGateway(object): + + def __init__(self): + self.client = boto3.client('apigateway') + pass + + def create_api_from_yaml(self, yaml_file): + # response = client.import_rest_api( + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def update_api_from_yaml(self, yaml_file): + # response = client.put_rest_api( + # restApiId='string', + # mode='merge'|'overwrite', + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using """ + pass + + def \ No newline at end of file From 5fbec38e4334593117c392d433ec6df8d3e116a9 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:11:41 +0800 Subject: [PATCH 046/214] Added skeleton 
tests for aws features --- pfunk/tests/test_aws.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 pfunk/tests/test_aws.py diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py new file mode 100644 index 0000000..c3cdc45 --- /dev/null +++ b/pfunk/tests/test_aws.py @@ -0,0 +1,20 @@ +import unittest + +from pfunk.utils.aws import ApiGateway +from pfunk.tests import User, Group +from pfunk.project import Project + + +class ApiGatewayTests(unittest.TestCase): + + def setUp(self) -> None: + self.project = Project() + + def test_validate_yaml(self): + pass + + def test_create_api_from_yaml(self): + pass + + def test_update_api_from_yaml(self): + pass From 6c34dc507b62a514d6779ecf2950072d0f39e2ec Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 7 Apr 2022 15:29:22 +0800 Subject: [PATCH 047/214] Added yaml validator and have it return specific errors --- pfunk/tests/test_aws.py | 34 ++++++++++++++++++++++----- pfunk/utils/aws.py | 21 ++++++++++++----- pfunk/utils/swagger.py | 1 + poetry.lock | 52 ++++++++++++++++++++++++++++++++++++----- pyproject.toml | 1 + 5 files changed, 91 insertions(+), 18 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index c3cdc45..a6b1314 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,20 +1,42 @@ import unittest +from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group +from pfunk.tests import User, Group, Person, Sport from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - def setUp(self) -> None: - self.project = Project() + @classmethod + def setUpCls(cls) -> None: + cls.project = Project() + cls.aws_client = ApiGateway() + cls.project.add_resources([Person, Sport, Group, User]) + cls.api_yaml = cls.project.generate_swagger() def test_validate_yaml(self): - pass + result = self.aws_client.validate_yaml(self.api_yaml) + self.assertIsNone(result) # if there are no errors, then spec is valid + def test_validate_wrong_yaml(self): + result = self.aws_client.validate_yaml('wrong yaml...33::39') + self.assertIsNotNone(result) # if there are returned objs, there is an error + + @mock.patch('boto3.client') def test_create_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + @mock.patch('boto3.client') def test_update_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + + def test_create_api_from_wrong_yaml(self): + result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. YAML is not valid.') + + def test_update_api_from_wrong_yaml(self): + result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. 
YAML is not valid.') \ No newline at end of file diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7413120..4c61506 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,5 +1,9 @@ import boto3 import swaggyp as sw +from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator +from openapi_spec_validator.readers import read_from_filename +from openapi_spec_validator.exceptions import OpenAPIValidationError + class ApiGateway(object): @@ -7,6 +11,17 @@ def __init__(self): self.client = boto3.client('apigateway') pass + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using OpenAPI Spec v2""" + try: + spec_dict, spec_url = read_from_filename(yaml_file) + validate_v2_spec(spec_dict) + except OpenAPIValidationError as err: + errors = [{err.message: err.json_path} + for err in openapi_v2_spec_validator.iter_errors(spec_dict)] + return errors + return None + def create_api_from_yaml(self, yaml_file): # response = client.import_rest_api( # failOnWarnings=True|False, @@ -28,9 +43,3 @@ def update_api_from_yaml(self, yaml_file): # body=b'bytes'|file # ) pass - - def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using """ - pass - - def \ No newline at end of file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 9711904..175d0ea 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -110,6 +110,7 @@ def write_to_yaml(self): if not os.path.exists(f'swagger.yaml'): with open(f'swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) + return t.to_yaml() else: print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') print(t.to_yaml()) diff --git a/poetry.lock b/poetry.lock index 8121a5c..ede9b77 100644 --- a/poetry.lock +++ b/poetry.lock @@ -66,7 +66,7 @@ six = ">=1.6.1,<2.0" name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -385,7 +385,7 @@ python-versions = ">=3.5" name = "importlib-resources" version = "5.7.1" description = "Read resources from Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -531,7 +531,7 @@ python-versions = ">=3.7" name = "jsonschema" version = "4.5.1" description = "An implementation of JSON Schema validation for Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -763,6 +763,38 @@ docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "m json-logging = ["json-logging"] test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + [[package]] name 
= "packaging" version = "21.3" @@ -943,7 +975,7 @@ diagrams = ["railroad-diagrams", "jinja2"] name = "pyrsistent" version = "0.18.1" description = "Persistent/Functional/Immutable data structures" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1267,7 +1299,7 @@ notebook = ">=4.4.1" name = "zipp" version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1278,7 +1310,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "45c80cdba27ede0a7a28b611355294d4166ccfd7d4209b7fb6b75582d854b5a7" +content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" [metadata.files] appnope = [ @@ -1699,6 +1731,14 @@ notebook = [ {file = "notebook-6.4.11-py3-none-any.whl", hash = "sha256:b4a6baf2eba21ce67a0ca11a793d1781b06b8078f34d06c710742e55f3eee505"}, {file = "notebook-6.4.11.tar.gz", hash = "sha256:709b1856a564fe53054796c80e17a67262071c86bfbdfa6b96aaa346113c555a"}, ] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, diff --git a/pyproject.toml b/pyproject.toml index 25bd35e..46062d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" swaggyp = "^0.2.0" +openapi-spec-validator = "^0.4.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 0a959cb57e5f18573fb8a24b44bf7bcac27a399f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 048/214] Made yaml validation to properly separate openapi errors and python errors. 
Refactored swagger.py to return the swagger file directory --- pfunk/tests/test_aws.py | 46 +++++++++++++++---------- pfunk/utils/aws.py | 74 ++++++++++++++++++++++++++++++++++------- pfunk/utils/swagger.py | 36 ++++++++++++++------ 3 files changed, 117 insertions(+), 39 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index a6b1314..6ec5841 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,6 @@ +import os import unittest +import tempfile from unittest import mock from pfunk.utils.aws import ApiGateway @@ -9,34 +11,44 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpCls(cls) -> None: + def setUpClass(cls) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) - cls.api_yaml = cls.project.generate_swagger() + + swagger = cls.project.generate_swagger() + cls.swagger_dir = swagger['dir'] + cls.swagger_file = swagger['swagger_file'] + print(cls.swagger_dir) def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.api_yaml) + result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid def test_validate_wrong_yaml(self): result = self.aws_client.validate_yaml('wrong yaml...33::39') - self.assertIsNotNone(result) # if there are returned objs, there is an error + # if there are returned objs, there is an error + self.assertIsNotNone(result) @mock.patch('boto3.client') - def test_create_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() + def test_create_api_from_yaml(self, mocked): + result = self.aws_client.create_api_from_yaml( + yaml_file=self.swagger_dir) self.assertTrue(result['success']) @mock.patch('boto3.client') - def test_update_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() - self.assertTrue(result['success']) - - def test_create_api_from_wrong_yaml(self): - result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') - - def test_update_api_from_wrong_yaml(self): - result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') \ No newline at end of file + def test_create_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.create_api_from_yaml(tmp.name) + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + + # @mock.patch('boto3.client') + # def test_update_api_from_yaml(self): + # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) + # self.assertTrue(result['success']) + + # def test_update_api_from_wrong_yaml(self): + # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + # self.assertEqual(result, 'Bad Request. 
YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 4c61506..b1c26c0 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,36 +1,86 @@ import boto3 import swaggyp as sw +# from botocore.exceptions import BadReq +from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError class ApiGateway(object): + region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway') - pass + self.client = boto3.client('apigateway', region_name=self.region_name) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" try: spec_dict, spec_url = read_from_filename(yaml_file) validate_v2_spec(spec_dict) + except (OSError, AttributeError) as err: + return {'errors': str(err)} except OpenAPIValidationError as err: + return self._iterate_validator_errors(spec_dict) + return None + + def _iterate_validator_errors(self, spec_dict): + """ Iterates through list of errors that the `openapi_spec_validator` returned + + This method was implemented due to `openapi_spec_validator` design + that if an error happened while iterating through the YAML file + it returns a Python error. + + Args: + spec_dict (dict, required): + `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` + Returns: + list of errors + """ + try: errors = [{err.message: err.json_path} for err in openapi_v2_spec_validator.iter_errors(spec_dict)] return errors - return None + except (OSError, AttributeError) as err: + return str(err) - def create_api_from_yaml(self, yaml_file): - # response = client.import_rest_api( - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): + """ Creates an API for AWS API Gateway from a YAML swagger file + + Args: + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=file + ) + else: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file + ) + + if response: + return { + 'success': True, + response: response + } + # TODO: Specify boto exceptions + except Exception as err: + return err def update_api_from_yaml(self, yaml_file): # response = client.put_rest_api( diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 175d0ea..086ade7 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -77,11 +77,20 @@ def __init__(self, collections, rules=[]): def _convert_url_to_swagger(self, replacement: str, to_replace: str) -> str: return re.sub('<\w+:\w+>', f'{{{replacement}}}', to_replace) - def write_to_yaml(self): + def write_to_yaml(self, dir=''): """ Using the class' variables, write it to a swagger (yaml) file It will create `swagger.yaml` file in current directory, if there is already one, it will print the yaml file instead. 
+ + Args: + dir (str, optional): + custom directory of the swagger file. If there are no provided, create one in current dir. + Returns: + dir (str, required): + directory of the created swagger file + swagger_file (str, required): + the contents of the swagger yaml file """ if not os.path.exists(f'pfunk.json'): raise Exception('Missing JSON Config file.') @@ -107,14 +116,16 @@ def write_to_yaml(self): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'swagger.yaml'): - with open(f'swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}/swagger.yaml'): + with open(f'{dir}/swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') - print(t.to_yaml()) - return t.to_yaml() + print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + # print(t.to_yaml()) + return { + "dir": f'{dir}/swagger.yaml', + "swagger_file": t.to_yaml() + } def get_operations(self, col: Collection): """ Acquires all of the endpoint in the collections and make it @@ -234,10 +245,15 @@ def get_model_definitions(self, col: Collection): self.definitions.append(model) return self.definitions - def generate_swagger(self): - """ One-function-to-call needed function to generate a swagger documentation """ + def generate_swagger(self, dir=''): + """ One-function-to-call needed function to generate a swagger documentation + + Args: + dir (str, optional): + directory to create the yaml file + """ for i in self.collections: col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml() \ No newline at end of file + return self.write_to_yaml(dir) \ No newline at end of file From 882b54cf9a622ca3f3459893bd2f925c6bb71025 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:55:52 +0800 Subject: [PATCH 049/214] Refactored default dir of swagger file. Refactored unit tests for AWS utils --- pfunk/tests/test_aws.py | 1 - pfunk/utils/swagger.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 6ec5841..571f3c1 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -19,7 +19,6 @@ def setUpClass(cls) -> None: swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] - print(cls.swagger_dir) def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 086ade7..edc9bfd 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -116,14 +116,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}/swagger.yaml'): - with open(f'{dir}/swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}swagger.yaml'): + with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}/swagger.yaml', + "dir": f'{dir}swagger.yaml', "swagger_file": t.to_yaml() } From eecacffd644931b0aacfa67b047c08fb28452dd0 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 050/214] Finished create/update api from yaml. Added writing to config file if API is created. 
Added reading from config file if API is to be updated --- pfunk/tests/test_aws.py | 23 +++++--- pfunk/utils/aws.py | 128 ++++++++++++++++++++++++++++++++-------- 2 files changed, 119 insertions(+), 32 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 571f3c1..2789dc2 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -11,7 +11,8 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpClass(cls) -> None: + @mock.patch('boto3.client') + def setUpClass(cls, mocked) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) @@ -43,11 +44,17 @@ def test_create_api_from_wrong_yaml(self, mocked): result = self.aws_client.create_api_from_yaml(tmp.name) self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') - # @mock.patch('boto3.client') - # def test_update_api_from_yaml(self): - # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) - # self.assertTrue(result['success']) + @mock.patch('boto3.client') + def test_update_api_from_yaml(self, mocked): + result = self.aws_client.update_api_from_yaml( + yaml_file=self.swagger_dir, mode='merge') + print(result) + self.assertTrue(result['success']) - # def test_update_api_from_wrong_yaml(self): - # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - # self.assertEqual(result, 'Bad Request. YAML is not valid.') + @mock.patch('boto3.client') + def test_update_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index b1c26c0..7427db3 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,4 +1,6 @@ +import datetime import boto3 +import json import swaggyp as sw # from botocore.exceptions import BadReq from envs import env @@ -6,12 +8,54 @@ from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError +AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') +AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') + + +def _json_dt_helper(o): + """ Helps serializing `datetime` objects to a readable string """ + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + + +def write_to_config(obj, config_file_dir='pfunk.json'): + """ Writes to pfunk config file + + Args: + obj (dict, required): + key, value pairs to write to json file + config_file_dir (str, optional): + directory of the config json file, default='pfunk.json' + Returns: + config_file (dict, required): + the current value of config file (pfunk.json) + """ + with open(config_file_dir, 'r+') as f: + data = json.load(f) + data.update(obj) + f.seek(0) + f.truncate() + json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) + return data + + +def read_from_config_file(config_file_dir='pfunk.json'): + """ Returns data from config file in dict form """ + with open(config_file_dir, 'r') as f: + data = json.load(f) + return data + class ApiGateway(object): region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway', region_name=self.region_name) + self.client = boto3.client( + 'apigateway', + aws_access_key_id=AWS_ACCESS_KEY, + 
aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name=AWS_DEFAULT_REGION) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" @@ -63,33 +107,69 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): try: if not type(yaml_file) == 'string': with open(yaml_file, 'r') as file: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=file - ) - else: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + yaml_file = file.read() + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file) + + # TODO: Fix -- if using mocked obj, don't write anything + if response: + write_to_config({'api': response}) + return { + 'success': True, + 'response': response + } + # TODO: Specify boto exceptions + except Exception as err: + return { + 'error': str(err) + } + + def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): + """ Updates rest API using yaml file + + Args: + rest_api_id (string, required): + ID of the API for updating, if not provided, use API ID from `pfunk.json` + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + mode (string, required): + Mode of update, choice=['merge', 'overwrite'] + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + yaml_file = file.read() + # Acquire REST API ID from config file if not provided + if not rest_api_id: + data = read_from_config_file() + if data.get('api'): + rest_api_id = (data.get('api') + .get('id')) + + response = self.client.put_rest_api( + restApiId=rest_api_id, + mode=mode, + failOnWarnings=fail_on_warnings, + body=yaml_file + ) if response: return { 'success': True, - response: response + 'response': response } # TODO: Specify boto exceptions except Exception as err: - return err - - def update_api_from_yaml(self, yaml_file): - # response = client.put_rest_api( - # restApiId='string', - # mode='merge'|'overwrite', - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + return { + 'error': str(err) + } From a4b98118a6aef1b281667c5c85c10a194b8ca4e4 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 13 Apr 2022 13:44:30 +0800 Subject: [PATCH 051/214] Reworked what exceptions does aws util react to. Added doc for aws util tests --- pfunk/tests/test_aws.py | 10 +++++++++- pfunk/utils/aws.py | 15 ++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 2789dc2..d28c852 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -9,6 +9,15 @@ class ApiGatewayTests(unittest.TestCase): + """ Unit tests for creation of API from Swagger file + + Note that the unittests uses mocked boto3 normally. If + you want to test against a real endpoint, remove the + patch decorator at `setUpClass` and the `mocked` + param. Also make sure you have the required + env vars for AWS credentials and you have + the json config in the current env. 
+ """ @classmethod @mock.patch('boto3.client') @@ -48,7 +57,6 @@ def test_create_api_from_wrong_yaml(self, mocked): def test_update_api_from_yaml(self, mocked): result = self.aws_client.update_api_from_yaml( yaml_file=self.swagger_dir, mode='merge') - print(result) self.assertTrue(result['success']) @mock.patch('boto3.client') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7427db3..13164f8 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -2,7 +2,7 @@ import boto3 import json import swaggyp as sw -# from botocore.exceptions import BadReq +from botocore.exceptions import ClientError, NoCredentialsError from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename @@ -20,7 +20,7 @@ def _json_dt_helper(o): def write_to_config(obj, config_file_dir='pfunk.json'): - """ Writes to pfunk config file + """ Appends object to pfunk config file Args: obj (dict, required): @@ -111,16 +111,14 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - - # TODO: Fix -- if using mocked obj, don't write anything + if response: write_to_config({'api': response}) return { 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } @@ -154,7 +152,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin data = read_from_config_file() if data.get('api'): rest_api_id = (data.get('api') - .get('id')) + .get('id')) response = self.client.put_rest_api( restApiId=rest_api_id, @@ -168,8 +166,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } From 4a751b6f218e0448bcb97fb94d7cec509c4f55aa Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 27 Apr 2022 14:41:51 +0800 Subject: [PATCH 052/214] Did cleaning up of swaggerdoc docstrings --- pfunk/utils/swagger.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index edc9bfd..2c5f02f 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -42,6 +42,7 @@ def __init__(self, collections, rules=[]): """ Generates swagger doc. 
Details are going to be acquired from the collections The acquisition of the information needed for docs are as follows: + ``` Response: Description (str): View's `get_query` docstrings Status Code (int): @@ -56,9 +57,16 @@ def __init__(self, collections, rules=[]): Model: Name (str): The class name of the `collection` Properties (str): The fields of the collection and their type - + ``` + + Args: + collections ([`pfunk.collection.Collection`]): + array of collection of the project to generate models from + rules ([`werkzeug.routing.Rule`]): + array of additional URLs that the given collection doesn't have Returns: - Generated YAML file + swagger.yaml (yaml, required): + Generated YAML file """ self.collections = collections self.rules = rules From d79a82a8a097f2cac5e3295f58ea3f45a422671f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 4 May 2022 14:52:41 +0800 Subject: [PATCH 053/214] added generate-swagger as a cli command, creates a swagger file from the provided json config file of a project --- pfunk/cli.py | 41 +++++++++++++++++++++++++++++++++-------- pfunk/project.py | 18 +++++++++++++++--- pfunk/utils/swagger.py | 19 ++++++++++++++----- 3 files changed, 62 insertions(+), 16 deletions(-) diff --git a/pfunk/cli.py b/pfunk/cli.py index 838684b..7337cae 100644 --- a/pfunk/cli.py +++ b/pfunk/cli.py @@ -39,9 +39,7 @@ def load_config_file(filename): @click.option('--description', prompt=True, help='Project Description') @click.option('--api_type', type=click.Choice(['web', 'rest', 'none']), prompt=True, help='API Type (web, rest, none)') @click.argument('name') -def init(name: str, api_type: str, description: str, host: str, fauna_key: str, bucket: str, email: str, - stage_name: str, generate_local_key: bool): - +def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str, generate_local_key: bool): """ Creates a PFunk project Args: @@ -76,7 +74,8 @@ def init(name: str, api_type: str, description: str, host: str, fauna_key: str, }, f, indent=4, sort_keys=True) open(f'{name}/__init__.py', 'x').close() with open(f'{name}/wsgi.py', 'x') as f: - f.write(wsgi_template.render(PFUNK_PROJECT=f'{name}.project.project')) + f.write(wsgi_template.render( + PFUNK_PROJECT=f'{name}.project.project')) with open(f'{name}/project.py', 'x') as f: f.write(project_template.render()) with open(f'{name}/collections.py', 'x') as f: @@ -89,9 +88,11 @@ def init(name: str, api_type: str, description: str, host: str, fauna_key: str, q.create_database({'name': db_name}) ) key = client.query( - q.create_key({'database': q.database(db_name), 'role': 'admin'}) + q.create_key( + {'database': q.database(db_name), 'role': 'admin'}) ) - click.secho(f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') + click.secho( + f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') else: click.echo('There is already a project file in this directory.') @@ -147,7 +148,8 @@ def local(hostname: str, port: int, wsgi: str, config_file: str, use_debugger: b sys.path.insert(0, os.getcwd()) wsgi_path = wsgi or f'{config.get("name")}.wsgi.app' app = import_util(wsgi_path) - run_simple(hostname, port, app, use_debugger=use_debugger, use_reloader=use_reloader) + run_simple(hostname, port, app, use_debugger=use_debugger, + use_reloader=use_reloader) @pfunk.command() @@ -257,7 +259,8 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na project = import_util(project_path) perm_list = [] 
for i in project.collections: - perm_list.append(PermissionGroup(collection=i, permissions=['create', 'write', 'read', 'delete'])) + perm_list.append(PermissionGroup(collection=i, permissions=[ + 'create', 'write', 'read', 'delete'])) user.add_permissions(group, perm_list) @@ -283,5 +286,27 @@ def deploy(stage_name: str, config_path: str): d.deploy(stage_name) +@pfunk.command() +@click.option('--config_path', help='Configuration file path', default='pfunk.json') +@click.option('--yaml_path', help='Dir to create yaml swagger file to', default='') +def generate_swagger(config_path: str, yaml_path: str): + """ Generates the swagger file of the project from a config json file + + Args: + config_path (str, optional): + dir of the json config file to use + yaml_path (str, optional): + dir to put the generated swagger file + + Returns: + + """ + config = load_config_file(config_path) + sys.path.insert(0, os.getcwd()) + project_path = f'{config.get("name")}.project.project' + project = import_util(project_path) + project.generate_swagger(yaml_dir=yaml_path, config_file=config_path) + + if __name__ == '__main__': pfunk() diff --git a/pfunk/project.py b/pfunk/project.py index de66fb0..339ebeb 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -298,9 +298,21 @@ def wsgi_app(self, environ, start_response): start_response(status_str, response.wsgi_headers) return [str.encode(response.body)] - def generate_swagger(self): + def generate_swagger(self, yaml_dir='', config_file='pfunk.json'): + """ Generates a swagger file that houses all endpoints + + Args: + yaml_dir (str, optional): + which directory to create the swagger yaml file + config_file (str, optional): + which directory to look for the config file + + Returns: + swagger file + """ swag = SwaggerDoc( collections=self.collections, - rules=[GraphQLView.url()]) - swag_file = swag.generate_swagger() + rules=[GraphQLView.url()], + config_file=config_file) + swag_file = swag.generate_swagger(dir=yaml_dir) return swag_file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2c5f02f..6335fbd 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -38,7 +38,7 @@ class SwaggerDoc(object): - def __init__(self, collections, rules=[]): + def __init__(self, collections, rules=[], config_file='pfunk.json'): """ Generates swagger doc. 
Details are going to be acquired from the collections The acquisition of the information needed for docs are as follows: @@ -64,6 +64,9 @@ def __init__(self, collections, rules=[]): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given collection doesn't have + config_file (str, optional): + directory of the config_file + Returns: swagger.yaml (yaml, required): Generated YAML file @@ -73,6 +76,7 @@ def __init__(self, collections, rules=[]): self.paths = [] self.definitions = [] self.responses = [] + self.config_file = config_file self._response_classes = [ 'response_class', 'not_found_class', @@ -100,10 +104,10 @@ def write_to_yaml(self, dir=''): swagger_file (str, required): the contents of the swagger yaml file """ - if not os.path.exists(f'pfunk.json'): + if not os.path.exists(self.config_file): raise Exception('Missing JSON Config file.') else: - with open(f'pfunk.json', 'r') as f: + with open(self.config_file, 'r') as f: data = json.loads(f.read()) proj_title = data.get('name') proj_desc = data.get('description', 'A Pfunk project') @@ -112,6 +116,10 @@ def write_to_yaml(self, dir=''): basePath = data.get('basePath', '/') schemes = ['https'] + if dir: + if not dir.endswith('/'): + dir = dir + "/" + info = sw.Info( title=proj_title, description=proj_desc, @@ -128,7 +136,8 @@ def write_to_yaml(self, dir=''): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + print( + 'There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { "dir": f'{dir}swagger.yaml', @@ -264,4 +273,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) \ No newline at end of file + return self.write_to_yaml(dir) From 25aeaacfba0ca720b7f3aafcc9285e8e09fce4b9 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 054/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 36 ++++++++++++------------ pfunk/utils/swagger.py | 56 ++++++++++++++++++++++++++++++++----- 2 files changed, 67 insertions(+), 25 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 858dcc3..3f1a43c 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def 
test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 6335fbd..eae74e0 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -160,6 +160,7 @@ def get_operations(self, col: Collection): An array of `Path` that can be consumed using `swaggyp.SwaggerTemplate` to show available paths + ``` """ for view in col.collection_views: route = view.url(col) @@ -188,6 +189,7 @@ def get_operations(self, col: Collection): # Skip HEAD operations continue + # Acquire path parameters of URL if args is None or len(args) == 0: # if `defaults` weren't used in URL building, use the argument defined in the URL string for converter, arguments, variable in parse_rule(rule): @@ -196,10 +198,10 @@ def get_operations(self, col: Collection): args = variable arg_type = converter - # Replace werkzeug params () to swagger-style params ({id}) - swagger_rule = self._convert_url_to_swagger(args, rule) + params = [] + # Construct path parameters for swagger generation if arg_type: - params = sw.Parameter( + path_params = sw.Parameter( name=args, _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type), _in='path', @@ -207,18 +209,55 @@ def get_operations(self, col: Collection): required=True, allowEmptyValue=False ) + params.append(path_params) + + # Acquire payload of the view from the View's docstring + # where to cut the docstrings to use the definition for the payload of the view + oas_str_split = '[OAS]\n' + view_docs = view.__doc__ + view_payload = None + if (view_docs and len(view_docs.split(oas_str_split)) > 1): + view_payload = view_docs.split(oas_str_split)[1] + + # Construct payload for swagger generation + if view_payload: + for field in json.loads(view_payload).get('data'): + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) + params.append(param) + + docs_description = view_docs if not len(view_docs.split( + oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] + consumes = ['application/json', + 'application/x-www-form-urlencoded'] + produces = ['application/json', + 'application/x-www-form-urlencoded'] + if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, + description=docs_description, responses=responses, - parameters=[params]) + consumes=consumes, + produces=produces, + parameters=params) else: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=responses) + description=docs_description, + responses=responses, + consumes=consumes, + produces=produces) + + # Replace werkzeug params () to swagger-style params ({id}) + swagger_rule = self._convert_url_to_swagger(args, rule) p = sw.Path(endpoint=swagger_rule, operations=[op]) self.paths.append(p) return self.paths @@ -240,6 
+279,9 @@ def get_model_definitions(self, col: Collection): An array of `Definition` that can be consumed using `swaggyp.SwaggerTemplate` to show available models + + Payload: + """ # Define model definitions by iterating through collection's fields for its properties From 8d81f22c936b60692bcf8f1cca113e76256dece2 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 055/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 20 ++++++++------------ pfunk/web/views/json.py | 32 +++++++++++++++++++++++++++++--- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index eae74e0..e952883 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -211,17 +212,13 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's docstring - # where to cut the docstrings to use the definition for the payload of the view - oas_str_split = '[OAS]\n' - view_docs = view.__doc__ - view_payload = None - if (view_docs and len(view_docs.split(oas_str_split)) > 1): - view_payload = view_docs.split(oas_str_split)[1] + # Acquire payload of the view from the View's `_payload_docs` + view_payload = view()._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: - for field in json.loads(view_payload).get('data'): + for field in view_payload.get('data'): param = sw.Parameter( name=field.get('name'), _type=field.get('type'), @@ -232,17 +229,16 @@ def get_operations(self, col: Collection): ) params.append(param) - docs_description = view_docs if not len(view_docs.split( - oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = ['application/json', 'application/x-www-form-urlencoded'] + view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces, @@ -251,7 +247,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index ac2e994..1ebe635 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -25,6 +25,29 @@ def get_response(self): headers=self.get_headers() ) + def _payload_docs(self): + """ Used in defining payload parameters for the view. + + Should return a dict that has the fields of a swagger parameter e.g. 
+ {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + """ + return {} + class CreateView(UpdateMixin, ActionMixin, JSONView): """ Define a `Create` view that allows `creation` of an entity in the collection """ @@ -34,7 +57,8 @@ class CreateView(UpdateMixin, ActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ - obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) + obj = self.collection.create( + **self.get_query_kwargs(), _token=self.request.token) return obj def get_m2m_kwargs(self, obj): @@ -50,7 +74,8 @@ def get_m2m_kwargs(self, obj): """ data = self.request.get_json() - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + fields = self.collection.get_foreign_fields_by_type( + 'pfunk.fields.ManyToManyField') for k, v in fields.items(): current_value = data.get(k) col = v.get('foreign_class')() @@ -70,7 +95,8 @@ class UpdateView(UpdateMixin, IDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ - obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) + obj = self.collection.get(self.request.kwargs.get( + 'id'), _token=self.request.token) obj._data.update(self.get_query_kwargs()) obj.save() return obj From 7129fc37f2409b0ea372b9f3337a9e77fcc19125 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 056/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ pfunk/utils/swagger.py | 29 ++++++++++++++++++++--------- pfunk/web/views/json.py | 14 +++++++------- 3 files changed, 45 insertions(+), 34 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 3f1a43c..858dcc3 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) - - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) - - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) + + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) + + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + 
self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index e952883..c6fe21f 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,17 +216,28 @@ def get_operations(self, col: Collection): view_payload = view()._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + if field.get('schema'): + schema = sw.SwagSchema( + ref=field.get('schema') + ) + param = sw.Parameter( + name=field.get('name'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + schema=schema + ) + else: + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) consumes = ['application/json', diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 1ebe635..8124ead 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,7 +26,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in defining payload parameters for the view. + """ Used in custom defining payload parameters for the view. Should return a dict that has the fields of a swagger parameter e.g. {"data": [ @@ -34,15 +34,15 @@ def _payload_docs(self): "name":"name", "in":"formData", "description":"name of the pet", - "required": true, + "required": True, "type": "string" }, { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" } ]} """ From a007336171276521bc4c3025ca98f391e37a380a Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:08 +0800 Subject: [PATCH 057/214] Updated the docstrings of _payload_docs --- pfunk/utils/swagger.py | 4 +--- pfunk/web/views/json.py | 51 ++++++++++++++++++++++++++++++++++------- 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index c6fe21f..33c393b 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -219,9 +219,7 @@ def get_operations(self, col: Collection): if view_payload: for field in view_payload.get('data'): if field.get('schema'): - schema = sw.SwagSchema( - ref=field.get('schema') - ) + schema = sw.SwagSchema(ref=field.get('schema')) param = sw.Parameter( name=field.get('name'), _in=field.get('in'), diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 8124ead..793680d 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -28,15 +28,31 @@ def get_response(self): def _payload_docs(self): """ Used in custom defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter e.g. + Should return a dict that has the fields of a swagger parameter. + If there is an error in the swagger, it will not be raised. + Usage of `https://editor.swagger.io` to validate is recommended + e.g. 
+ ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": True, - "type": "string" - }, { "name": "body", "in": "body", @@ -45,6 +61,7 @@ def _payload_docs(self): "schema": "#/definitions/Person" } ]} + ``` """ return {} @@ -86,6 +103,24 @@ def get_m2m_kwargs(self, obj): ) ) + def _payload_docs(self): + return {"data": [ + { + "name": "name", + "in": "formData", + "description": "name of the pet", + "required": True, + "type": "string" + }, + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} + class UpdateView(UpdateMixin, IDMixin, JSONView): """ Define a view to allow `Update` operations """ From 31dba3dceb7563a042b55772941c00bef0805891 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:41 +0800 Subject: [PATCH 058/214] Updated the docstrings of _payload_docs --- pfunk/web/views/json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 793680d..4eb2f96 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,7 +26,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view. + """ Used in custom defining payload parameters for the view in Swagger generation. Should return a dict that has the fields of a swagger parameter. If there is an error in the swagger, it will not be raised. 
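
For reference, a minimal sketch of how a project-level view could override `_payload_docs()` so the Swagger generator documents its request body. This is a hedged example, not part of the patches above: the import path mirrors the file touched here (`pfunk/web/views/json.py`), and `Person` plus the `notify` flag are hypothetical names assumed to exist only for illustration.

```python
# Hedged sketch: assumes CreateView is importable from pfunk.web.views.json
# and that a `Person` collection is registered with the project so that
# `#/definitions/Person` exists in the generated swagger definitions.
from pfunk.web.views.json import CreateView


class PersonCreateView(CreateView):
    """Create view that documents its own request payload for swagger generation."""

    def _payload_docs(self):
        # Each entry becomes a swaggyp.Parameter in SwaggerDoc.get_operations():
        # entries carrying a `schema` key are emitted as $ref body parameters,
        # the rest as plain typed parameters.
        return {"data": [
            {
                "name": "body",
                "in": "body",
                "description": "Person object to create",
                "required": True,
                "schema": "#/definitions/Person"
            },
            {
                "name": "notify",
                "in": "formData",
                "description": "hypothetical flag that is not part of the model",
                "required": False,
                "type": "boolean"
            }
        ]}
```

The split between the two entry shapes follows `SwaggerDoc.get_operations()` as changed in these patches: a `schema` key produces a `sw.SwagSchema` reference, otherwise the `type`/`in`/`description`/`required` fields are passed straight to `sw.Parameter`.
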
From a2c4603c4cde0ccb90212478da5b608174d87002 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 059/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - pfunk/web/views/json.py | 27 +++++++++++---------------- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 22 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 33c393b..1a27812 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 4eb2f96..dc7a2c6 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -104,22 +104,17 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - return {"data": [ - { - "name": "name", - "in": "formData", - "description": "name of the pet", - "required": True, - "type": "string" - }, - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" - } - ]} + # TODO: Get view's collection class name by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/Collection" + } + ]} class UpdateView(UpdateMixin, IDMixin, JSONView): diff --git a/poetry.lock b/poetry.lock index ede9b77..32dcf5f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1173,7 +1173,7 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "swaggyp" -version = "0.2.0" +version = "0.3.0" description = "Python library for generating Swagger templates based on valley" category = "main" optional = false @@ -1310,7 +1310,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" +content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" [metadata.files] appnope = [ @@ -2025,8 +2025,8 @@ stripe = [ {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, ] swaggyp = [ - {file = "swaggyp-0.2.0-py3-none-any.whl", hash = "sha256:030dca0f0a24469abcf7e1f047eaa01c84c206a8d891cfb1a4a24f40e2a6a146"}, - {file = "swaggyp-0.2.0.tar.gz", hash = "sha256:b39ab7f8a49fdb17af7862c5ac81b872a5f7f95c3b0989a6a285fde3f4ee4885"}, + {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, + {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, ] terminado = [ {file = "terminado-0.15.0-py3-none-any.whl", hash = "sha256:0d5f126fbfdb5887b25ae7d9d07b0d716b1cc0ccaacc71c1f3c14d228e065197"}, diff --git a/pyproject.toml b/pyproject.toml index 46062d0..5aaa2ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,8 +25,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" +swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 32486221a25b0a951ab31a14a9c8ab3efa39f626 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 23 Jun 2022 15:13:06 +0800 Subject: [PATCH 
060/214] Fixed calling of view's _payload_docs to pass the correct argument. Made Create and Update views have default reference to model --- pfunk/utils/swagger.py | 2 +- pfunk/web/views/json.py | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 1a27812..2820efe 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -212,7 +212,7 @@ def get_operations(self, col: Collection): params.append(path_params) # Acquire payload of the view from the View's `_payload_docs` - view_payload = view()._payload_docs() + view_payload = view(col)._payload_docs() # Construct payload for swagger generation if view_payload: diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index dc7a2c6..791af54 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -104,7 +104,7 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # TODO: Get view's collection class name by default + # Reference the collection by default if self.collection: return {"data": [ { @@ -112,7 +112,7 @@ def _payload_docs(self): "in": "body", "description": "Collection object to add", "required": True, - "schema": f"#/definitions/Collection" + "schema": f"#/definitions/{self.collection.__class__.__name__}" } ]} @@ -131,6 +131,19 @@ def get_query(self): obj.save() return obj + def _payload_docs(self): + # Reference the collection by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/{self.collection.__class__.__name__}" + } + ]} + class DetailView(ObjectMixin, IDMixin, JSONView): """ Define a view to allow single entity operations """ From 9536db104a890868e414f548c03f17857c59b3fc Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:08:35 +0800 Subject: [PATCH 061/214] Added aws.py as a skeleton for upcoming AWS features --- pfunk/utils/aws.py | 192 +++++++-------------------------------------- 1 file changed, 28 insertions(+), 164 deletions(-) diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 13164f8..7413120 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,172 +1,36 @@ -import datetime import boto3 -import json import swaggyp as sw -from botocore.exceptions import ClientError, NoCredentialsError -from envs import env -from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator -from openapi_spec_validator.readers import read_from_filename -from openapi_spec_validator.exceptions import OpenAPIValidationError - -AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') -AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') -AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') - - -def _json_dt_helper(o): - """ Helps serializing `datetime` objects to a readable string """ - if isinstance(o, (datetime.date, datetime.datetime)): - return o.isoformat() - - -def write_to_config(obj, config_file_dir='pfunk.json'): - """ Appends object to pfunk config file - - Args: - obj (dict, required): - key, value pairs to write to json file - config_file_dir (str, optional): - directory of the config json file, default='pfunk.json' - Returns: - config_file (dict, required): - the current value of config file (pfunk.json) - """ - with open(config_file_dir, 'r+') as f: - data = json.load(f) - data.update(obj) - f.seek(0) - f.truncate() - json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) - return data - - -def 
read_from_config_file(config_file_dir='pfunk.json'): - """ Returns data from config file in dict form """ - with open(config_file_dir, 'r') as f: - data = json.load(f) - return data - class ApiGateway(object): - region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client( - 'apigateway', - aws_access_key_id=AWS_ACCESS_KEY, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY, - region_name=AWS_DEFAULT_REGION) - + self.client = boto3.client('apigateway') + pass + + def create_api_from_yaml(self, yaml_file): + # response = client.import_rest_api( + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def update_api_from_yaml(self, yaml_file): + # response = client.put_rest_api( + # restApiId='string', + # mode='merge'|'overwrite', + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using OpenAPI Spec v2""" - try: - spec_dict, spec_url = read_from_filename(yaml_file) - validate_v2_spec(spec_dict) - except (OSError, AttributeError) as err: - return {'errors': str(err)} - except OpenAPIValidationError as err: - return self._iterate_validator_errors(spec_dict) - return None - - def _iterate_validator_errors(self, spec_dict): - """ Iterates through list of errors that the `openapi_spec_validator` returned - - This method was implemented due to `openapi_spec_validator` design - that if an error happened while iterating through the YAML file - it returns a Python error. - - Args: - spec_dict (dict, required): - `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` - Returns: - list of errors - """ - try: - errors = [{err.message: err.json_path} - for err in openapi_v2_spec_validator.iter_errors(spec_dict)] - return errors - except (OSError, AttributeError) as err: - return str(err) - - def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): - """ Creates an API for AWS API Gateway from a YAML swagger file - - Args: - yaml_file (yaml file, required): - The OpenAPI swagger file to create API from - fail_on_warnings (bool, optional): - Specifies if the method will error on warnings. Default: `True` - """ - _yaml_valid = self.validate_yaml(yaml_file) - if _yaml_valid: - return { - "error": 'Bad Request. YAML is not valid.', - "yaml_err": _yaml_valid - } - - try: - if not type(yaml_file) == 'string': - with open(yaml_file, 'r') as file: - yaml_file = file.read() - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file) - - if response: - write_to_config({'api': response}) - return { - 'success': True, - 'response': response - } - except (ClientError, NoCredentialsError) as err: - return { - 'error': str(err) - } - - def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): - """ Updates rest API using yaml file - - Args: - rest_api_id (string, required): - ID of the API for updating, if not provided, use API ID from `pfunk.json` - yaml_file (yaml file, required): - The OpenAPI swagger file to create API from - mode (string, required): - Mode of update, choice=['merge', 'overwrite'] - fail_on_warnings (bool, optional): - Specifies if the method will error on warnings. Default: `True` - """ - _yaml_valid = self.validate_yaml(yaml_file) - if _yaml_valid: - return { - "error": 'Bad Request. 
YAML is not valid.', - "yaml_err": _yaml_valid - } - - try: - if not type(yaml_file) == 'string': - with open(yaml_file, 'r') as file: - yaml_file = file.read() - # Acquire REST API ID from config file if not provided - if not rest_api_id: - data = read_from_config_file() - if data.get('api'): - rest_api_id = (data.get('api') - .get('id')) - - response = self.client.put_rest_api( - restApiId=rest_api_id, - mode=mode, - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + """ Validate YAML file if it is valid for using """ + pass - if response: - return { - 'success': True, - 'response': response - } - except (ClientError, NoCredentialsError) as err: - return { - 'error': str(err) - } + def \ No newline at end of file From 9d9b0ac5439118db4be4f5a127b0a9a733bc968f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:11:41 +0800 Subject: [PATCH 062/214] Added skeleton tests for aws features --- pfunk/tests/test_aws.py | 64 ++++++----------------------------------- 1 file changed, 8 insertions(+), 56 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index d28c852..c3cdc45 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,68 +1,20 @@ -import os import unittest -import tempfile -from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group, Person, Sport +from pfunk.tests import User, Group from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - """ Unit tests for creation of API from Swagger file - - Note that the unittests uses mocked boto3 normally. If - you want to test against a real endpoint, remove the - patch decorator at `setUpClass` and the `mocked` - param. Also make sure you have the required - env vars for AWS credentials and you have - the json config in the current env. - """ - @classmethod - @mock.patch('boto3.client') - def setUpClass(cls, mocked) -> None: - cls.project = Project() - cls.aws_client = ApiGateway() - cls.project.add_resources([Person, Sport, Group, User]) - - swagger = cls.project.generate_swagger() - cls.swagger_dir = swagger['dir'] - cls.swagger_file = swagger['swagger_file'] + def setUp(self) -> None: + self.project = Project() def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.swagger_dir) - self.assertIsNone(result) # if there are no errors, then spec is valid - - def test_validate_wrong_yaml(self): - result = self.aws_client.validate_yaml('wrong yaml...33::39') - # if there are returned objs, there is an error - self.assertIsNotNone(result) - - @mock.patch('boto3.client') - def test_create_api_from_yaml(self, mocked): - result = self.aws_client.create_api_from_yaml( - yaml_file=self.swagger_dir) - self.assertTrue(result['success']) - - @mock.patch('boto3.client') - def test_create_api_from_wrong_yaml(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: - tmp.seek(0) - tmp.write('test wrong yaml') - result = self.aws_client.create_api_from_yaml(tmp.name) - self.assertEqual(result['error'], 'Bad Request. 
YAML is not valid.') + pass - @mock.patch('boto3.client') - def test_update_api_from_yaml(self, mocked): - result = self.aws_client.update_api_from_yaml( - yaml_file=self.swagger_dir, mode='merge') - self.assertTrue(result['success']) + def test_create_api_from_yaml(self): + pass - @mock.patch('boto3.client') - def test_update_api_from_wrong_yaml(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: - tmp.seek(0) - tmp.write('test wrong yaml') - result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') - self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + def test_update_api_from_yaml(self): + pass From 1d767add81be0d267ec4d15c23d50c1489fbe9d7 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 7 Apr 2022 15:29:22 +0800 Subject: [PATCH 063/214] Added yaml validator and have it return specific errors --- pfunk/tests/test_aws.py | 34 +++++++++++++++++++++++++++------ pfunk/utils/aws.py | 21 +++++++++++++++------ pfunk/utils/swagger.py | 1 + poetry.lock | 42 ++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 +- 5 files changed, 86 insertions(+), 14 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index c3cdc45..a6b1314 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,20 +1,42 @@ import unittest +from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group +from pfunk.tests import User, Group, Person, Sport from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - def setUp(self) -> None: - self.project = Project() + @classmethod + def setUpCls(cls) -> None: + cls.project = Project() + cls.aws_client = ApiGateway() + cls.project.add_resources([Person, Sport, Group, User]) + cls.api_yaml = cls.project.generate_swagger() def test_validate_yaml(self): - pass + result = self.aws_client.validate_yaml(self.api_yaml) + self.assertIsNone(result) # if there are no errors, then spec is valid + def test_validate_wrong_yaml(self): + result = self.aws_client.validate_yaml('wrong yaml...33::39') + self.assertIsNotNone(result) # if there are returned objs, there is an error + + @mock.patch('boto3.client') def test_create_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + @mock.patch('boto3.client') def test_update_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + + def test_create_api_from_wrong_yaml(self): + result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. YAML is not valid.') + + def test_update_api_from_wrong_yaml(self): + result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. 
YAML is not valid.') \ No newline at end of file diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7413120..4c61506 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,5 +1,9 @@ import boto3 import swaggyp as sw +from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator +from openapi_spec_validator.readers import read_from_filename +from openapi_spec_validator.exceptions import OpenAPIValidationError + class ApiGateway(object): @@ -7,6 +11,17 @@ def __init__(self): self.client = boto3.client('apigateway') pass + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using OpenAPI Spec v2""" + try: + spec_dict, spec_url = read_from_filename(yaml_file) + validate_v2_spec(spec_dict) + except OpenAPIValidationError as err: + errors = [{err.message: err.json_path} + for err in openapi_v2_spec_validator.iter_errors(spec_dict)] + return errors + return None + def create_api_from_yaml(self, yaml_file): # response = client.import_rest_api( # failOnWarnings=True|False, @@ -28,9 +43,3 @@ def update_api_from_yaml(self, yaml_file): # body=b'bytes'|file # ) pass - - def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using """ - pass - - def \ No newline at end of file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2820efe..adcbe9e 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -135,6 +135,7 @@ def write_to_yaml(self, dir=''): if not os.path.exists(f'{dir}swagger.yaml'): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) + return t.to_yaml() else: print( 'There is an existing swagger file. Kindly move/delete it to generate a new one.') diff --git a/poetry.lock b/poetry.lock index 32dcf5f..81c654d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -795,6 +795,38 @@ PyYAML = ">=5.1" [package.extras] requests = ["requests"] +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + [[package]] name = "packaging" version = "21.3" @@ -1310,7 +1342,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" +content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" [metadata.files] appnope = [ @@ -1739,6 +1771,14 @@ openapi-spec-validator = [ {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, ] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = 
"openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, diff --git a/pyproject.toml b/pyproject.toml index 5aaa2ab..46062d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,8 +25,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" +swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" -swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From eb0cf783901e5c36372b74c710bc093ad972cd85 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 064/214] Made yaml validation to properly separate openapi errors and python errors. Refactored swagger.py to return the swagger file directory --- pfunk/tests/test_aws.py | 46 +++++++++++++++---------- pfunk/utils/aws.py | 74 ++++++++++++++++++++++++++++++++++------- pfunk/utils/swagger.py | 12 +++---- 3 files changed, 96 insertions(+), 36 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index a6b1314..6ec5841 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,6 @@ +import os import unittest +import tempfile from unittest import mock from pfunk.utils.aws import ApiGateway @@ -9,34 +11,44 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpCls(cls) -> None: + def setUpClass(cls) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) - cls.api_yaml = cls.project.generate_swagger() + + swagger = cls.project.generate_swagger() + cls.swagger_dir = swagger['dir'] + cls.swagger_file = swagger['swagger_file'] + print(cls.swagger_dir) def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.api_yaml) + result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid def test_validate_wrong_yaml(self): result = self.aws_client.validate_yaml('wrong yaml...33::39') - self.assertIsNotNone(result) # if there are returned objs, there is an error + # if there are returned objs, there is an error + self.assertIsNotNone(result) @mock.patch('boto3.client') - def test_create_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() + def test_create_api_from_yaml(self, mocked): + result = self.aws_client.create_api_from_yaml( + yaml_file=self.swagger_dir) self.assertTrue(result['success']) @mock.patch('boto3.client') - def test_update_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() - self.assertTrue(result['success']) - - def test_create_api_from_wrong_yaml(self): - result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') - - def test_update_api_from_wrong_yaml(self): - result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. 
YAML is not valid.') \ No newline at end of file + def test_create_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.create_api_from_yaml(tmp.name) + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + + # @mock.patch('boto3.client') + # def test_update_api_from_yaml(self): + # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) + # self.assertTrue(result['success']) + + # def test_update_api_from_wrong_yaml(self): + # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + # self.assertEqual(result, 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 4c61506..b1c26c0 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,36 +1,86 @@ import boto3 import swaggyp as sw +# from botocore.exceptions import BadReq +from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError class ApiGateway(object): + region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway') - pass + self.client = boto3.client('apigateway', region_name=self.region_name) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" try: spec_dict, spec_url = read_from_filename(yaml_file) validate_v2_spec(spec_dict) + except (OSError, AttributeError) as err: + return {'errors': str(err)} except OpenAPIValidationError as err: + return self._iterate_validator_errors(spec_dict) + return None + + def _iterate_validator_errors(self, spec_dict): + """ Iterates through list of errors that the `openapi_spec_validator` returned + + This method was implemented due to `openapi_spec_validator` design + that if an error happened while iterating through the YAML file + it returns a Python error. + + Args: + spec_dict (dict, required): + `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` + Returns: + list of errors + """ + try: errors = [{err.message: err.json_path} for err in openapi_v2_spec_validator.iter_errors(spec_dict)] return errors - return None + except (OSError, AttributeError) as err: + return str(err) - def create_api_from_yaml(self, yaml_file): - # response = client.import_rest_api( - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): + """ Creates an API for AWS API Gateway from a YAML swagger file + + Args: + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. 
YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=file + ) + else: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file + ) + + if response: + return { + 'success': True, + response: response + } + # TODO: Specify boto exceptions + except Exception as err: + return err def update_api_from_yaml(self, yaml_file): # response = client.put_rest_api( diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index adcbe9e..01d1224 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -132,16 +132,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}swagger.yaml'): - with open(f'{dir}swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}/swagger.yaml'): + with open(f'{dir}/swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: - print( - 'There is an existing swagger file. Kindly move/delete it to generate a new one.') + print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}swagger.yaml', + "dir": f'{dir}/swagger.yaml', "swagger_file": t.to_yaml() } @@ -320,4 +318,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) + return self.write_to_yaml(dir) \ No newline at end of file From 0794d4793c3717b7d1ce85681bc1c68c7439fde1 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:55:52 +0800 Subject: [PATCH 065/214] Refactored default dir of swagger file. Refactored unit tests for AWS utils --- pfunk/tests/test_aws.py | 1 - pfunk/utils/swagger.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 6ec5841..571f3c1 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -19,7 +19,6 @@ def setUpClass(cls) -> None: swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] - print(cls.swagger_dir) def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 01d1224..3c04ca3 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -132,14 +132,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}/swagger.yaml'): - with open(f'{dir}/swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}swagger.yaml'): + with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}/swagger.yaml', + "dir": f'{dir}swagger.yaml', "swagger_file": t.to_yaml() } From d976d4ee2c3d4ab021d0cf787ebfdfedf2c74308 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 066/214] Finished create/update api from yaml. Added writing to config file if API is created. 
Added reading from config file if API is to be updated --- pfunk/tests/test_aws.py | 23 +++++--- pfunk/utils/aws.py | 128 ++++++++++++++++++++++++++++++++-------- 2 files changed, 119 insertions(+), 32 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 571f3c1..2789dc2 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -11,7 +11,8 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpClass(cls) -> None: + @mock.patch('boto3.client') + def setUpClass(cls, mocked) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) @@ -43,11 +44,17 @@ def test_create_api_from_wrong_yaml(self, mocked): result = self.aws_client.create_api_from_yaml(tmp.name) self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') - # @mock.patch('boto3.client') - # def test_update_api_from_yaml(self): - # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) - # self.assertTrue(result['success']) + @mock.patch('boto3.client') + def test_update_api_from_yaml(self, mocked): + result = self.aws_client.update_api_from_yaml( + yaml_file=self.swagger_dir, mode='merge') + print(result) + self.assertTrue(result['success']) - # def test_update_api_from_wrong_yaml(self): - # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - # self.assertEqual(result, 'Bad Request. YAML is not valid.') + @mock.patch('boto3.client') + def test_update_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index b1c26c0..7427db3 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,4 +1,6 @@ +import datetime import boto3 +import json import swaggyp as sw # from botocore.exceptions import BadReq from envs import env @@ -6,12 +8,54 @@ from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError +AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') +AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') + + +def _json_dt_helper(o): + """ Helps serializing `datetime` objects to a readable string """ + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + + +def write_to_config(obj, config_file_dir='pfunk.json'): + """ Writes to pfunk config file + + Args: + obj (dict, required): + key, value pairs to write to json file + config_file_dir (str, optional): + directory of the config json file, default='pfunk.json' + Returns: + config_file (dict, required): + the current value of config file (pfunk.json) + """ + with open(config_file_dir, 'r+') as f: + data = json.load(f) + data.update(obj) + f.seek(0) + f.truncate() + json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) + return data + + +def read_from_config_file(config_file_dir='pfunk.json'): + """ Returns data from config file in dict form """ + with open(config_file_dir, 'r') as f: + data = json.load(f) + return data + class ApiGateway(object): region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway', region_name=self.region_name) + self.client = boto3.client( + 'apigateway', + aws_access_key_id=AWS_ACCESS_KEY, + 
aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name=AWS_DEFAULT_REGION) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" @@ -63,33 +107,69 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): try: if not type(yaml_file) == 'string': with open(yaml_file, 'r') as file: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=file - ) - else: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + yaml_file = file.read() + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file) + + # TODO: Fix -- if using mocked obj, don't write anything + if response: + write_to_config({'api': response}) + return { + 'success': True, + 'response': response + } + # TODO: Specify boto exceptions + except Exception as err: + return { + 'error': str(err) + } + + def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): + """ Updates rest API using yaml file + + Args: + rest_api_id (string, required): + ID of the API for updating, if not provided, use API ID from `pfunk.json` + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + mode (string, required): + Mode of update, choice=['merge', 'overwrite'] + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + yaml_file = file.read() + # Acquire REST API ID from config file if not provided + if not rest_api_id: + data = read_from_config_file() + if data.get('api'): + rest_api_id = (data.get('api') + .get('id')) + + response = self.client.put_rest_api( + restApiId=rest_api_id, + mode=mode, + failOnWarnings=fail_on_warnings, + body=yaml_file + ) if response: return { 'success': True, - response: response + 'response': response } # TODO: Specify boto exceptions except Exception as err: - return err - - def update_api_from_yaml(self, yaml_file): - # response = client.put_rest_api( - # restApiId='string', - # mode='merge'|'overwrite', - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + return { + 'error': str(err) + } From b510112c8e05ec8ac5a59f69e2fe2d1497058881 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 13 Apr 2022 13:44:30 +0800 Subject: [PATCH 067/214] Reworked what exceptions does aws util react to. Added doc for aws util tests --- pfunk/tests/test_aws.py | 10 +++++++++- pfunk/utils/aws.py | 15 ++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 2789dc2..d28c852 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -9,6 +9,15 @@ class ApiGatewayTests(unittest.TestCase): + """ Unit tests for creation of API from Swagger file + + Note that the unittests uses mocked boto3 normally. If + you want to test against a real endpoint, remove the + patch decorator at `setUpClass` and the `mocked` + param. Also make sure you have the required + env vars for AWS credentials and you have + the json config in the current env. 
+ """ @classmethod @mock.patch('boto3.client') @@ -48,7 +57,6 @@ def test_create_api_from_wrong_yaml(self, mocked): def test_update_api_from_yaml(self, mocked): result = self.aws_client.update_api_from_yaml( yaml_file=self.swagger_dir, mode='merge') - print(result) self.assertTrue(result['success']) @mock.patch('boto3.client') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7427db3..13164f8 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -2,7 +2,7 @@ import boto3 import json import swaggyp as sw -# from botocore.exceptions import BadReq +from botocore.exceptions import ClientError, NoCredentialsError from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename @@ -20,7 +20,7 @@ def _json_dt_helper(o): def write_to_config(obj, config_file_dir='pfunk.json'): - """ Writes to pfunk config file + """ Appends object to pfunk config file Args: obj (dict, required): @@ -111,16 +111,14 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - - # TODO: Fix -- if using mocked obj, don't write anything + if response: write_to_config({'api': response}) return { 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } @@ -154,7 +152,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin data = read_from_config_file() if data.get('api'): rest_api_id = (data.get('api') - .get('id')) + .get('id')) response = self.client.put_rest_api( restApiId=rest_api_id, @@ -168,8 +166,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } From 91e7bb3b477aa344f2ff6927b3312dae5cdf1c62 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 27 Apr 2022 14:41:51 +0800 Subject: [PATCH 068/214] Did cleaning up of swaggerdoc docstrings --- pfunk/utils/swagger.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 3c04ca3..778c981 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -64,9 +64,6 @@ def __init__(self, collections, rules=[], config_file='pfunk.json'): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given collection doesn't have - config_file (str, optional): - directory of the config_file - Returns: swagger.yaml (yaml, required): Generated YAML file From c68975deed4d5f1eb2ec6a7d3d8c73965099de17 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 4 May 2022 14:52:41 +0800 Subject: [PATCH 069/214] added generate-swagger as a cli command, creates a swagger file from the provided json config file of a project --- pfunk/utils/swagger.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 778c981..2820efe 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -64,6 +64,9 @@ def __init__(self, collections, rules=[], config_file='pfunk.json'): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given 
collection doesn't have + config_file (str, optional): + directory of the config_file + Returns: swagger.yaml (yaml, required): Generated YAML file @@ -133,7 +136,8 @@ def write_to_yaml(self, dir=''): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + print( + 'There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { "dir": f'{dir}swagger.yaml', @@ -315,4 +319,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) \ No newline at end of file + return self.write_to_yaml(dir) From 40983c20f27456a14b28319f433eb17cb53aed9e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 070/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 36 +++++++++++++++--------------- pfunk/utils/swagger.py | 44 +++++++++++++++++-------------------- 2 files changed, 38 insertions(+), 42 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 858dcc3..3f1a43c 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2820efe..eae74e0 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -211,42 +211,38 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's `_payload_docs` - view_payload = view(col)._payload_docs() + # Acquire payload of the view from the View's docstring + # where to cut the docstrings to use the definition for the payload of the view + oas_str_split = '[OAS]\n' + view_docs = view.__doc__ + view_payload = None + if (view_docs and 
len(view_docs.split(oas_str_split)) > 1): + view_payload = view_docs.split(oas_str_split)[1] # Construct payload for swagger generation if view_payload: - for field in view_payload.get('data'): - if field.get('schema'): - schema = sw.SwagSchema(ref=field.get('schema')) - param = sw.Parameter( - name=field.get('name'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - schema=schema - ) - else: - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + for field in json.loads(view_payload).get('data'): + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) + docs_description = view_docs if not len(view_docs.split( + oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = ['application/json', 'application/x-www-form-urlencoded'] - view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view_docs, + description=docs_description, responses=responses, consumes=consumes, produces=produces, @@ -255,7 +251,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view_docs, + description=docs_description, responses=responses, consumes=consumes, produces=produces) From 506cd8561e97b2b601ace512aaa4bf5e9c1da0ab Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 071/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 20 ++++++++---------- pfunk/web/views/json.py | 45 +++++++++++++---------------------------- 2 files changed, 22 insertions(+), 43 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index eae74e0..e952883 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -211,17 +212,13 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's docstring - # where to cut the docstrings to use the definition for the payload of the view - oas_str_split = '[OAS]\n' - view_docs = view.__doc__ - view_payload = None - if (view_docs and len(view_docs.split(oas_str_split)) > 1): - view_payload = view_docs.split(oas_str_split)[1] + # Acquire payload of the view from the View's `_payload_docs` + view_payload = view()._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: - for field in json.loads(view_payload).get('data'): + for field in view_payload.get('data'): param = sw.Parameter( name=field.get('name'), _type=field.get('type'), @@ -232,17 +229,16 @@ def get_operations(self, col: Collection): ) params.append(param) - docs_description = view_docs if not len(view_docs.split( - oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = 
['application/json', 'application/x-www-form-urlencoded'] + view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces, @@ -251,7 +247,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 791af54..244bd89 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,42 +26,25 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view in Swagger generation. + """ Used in defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter. - If there is an error in the swagger, it will not be raised. - Usage of `https://editor.swagger.io` to validate is recommended - e.g. - ``` - # Defining formdata - {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": true, - "type": "string" - }, - { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" - } - ]} - - # Defining a payload that references a model + Should return a dict that has the fields of a swagger parameter e.g. {"data": [ { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" } ]} - ``` """ return {} From d30cd152e5b2fceacc55af63d4f543b23e000066 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 072/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ pfunk/utils/swagger.py | 29 ++++++++++++++++++++--------- pfunk/web/views/json.py | 14 +++++++------- 3 files changed, 45 insertions(+), 34 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 3f1a43c..858dcc3 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) - - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) - - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no 
duplicates are there + self.assertEqual(self.project.collections, set([Person])) + + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) + + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index e952883..c6fe21f 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,17 +216,28 @@ def get_operations(self, col: Collection): view_payload = view()._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + if field.get('schema'): + schema = sw.SwagSchema( + ref=field.get('schema') + ) + param = sw.Parameter( + name=field.get('name'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + schema=schema + ) + else: + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) consumes = ['application/json', diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 244bd89..a8447e3 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,7 +26,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in defining payload parameters for the view. + """ Used in custom defining payload parameters for the view. Should return a dict that has the fields of a swagger parameter e.g. 
{"data": [ @@ -34,15 +34,15 @@ def _payload_docs(self): "name":"name", "in":"formData", "description":"name of the pet", - "required": true, + "required": True, "type": "string" }, { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" } ]} """ From 835881225929d7ae5629feaca18cd65eeb7e74af Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:08 +0800 Subject: [PATCH 073/214] Updated the docstrings of _payload_docs --- pfunk/utils/swagger.py | 4 +-- pfunk/web/views/json.py | 60 ++++++++++++++++++++++++++++------------- 2 files changed, 42 insertions(+), 22 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index c6fe21f..33c393b 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -219,9 +219,7 @@ def get_operations(self, col: Collection): if view_payload: for field in view_payload.get('data'): if field.get('schema'): - schema = sw.SwagSchema( - ref=field.get('schema') - ) + schema = sw.SwagSchema(ref=field.get('schema')) param = sw.Parameter( name=field.get('name'), _in=field.get('in'), diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index a8447e3..d620469 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -28,15 +28,31 @@ def get_response(self): def _payload_docs(self): """ Used in custom defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter e.g. + Should return a dict that has the fields of a swagger parameter. + If there is an error in the swagger, it will not be raised. + Usage of `https://editor.swagger.io` to validate is recommended + e.g. 
+ ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": True, - "type": "string" - }, { "name": "body", "in": "body", @@ -45,6 +61,7 @@ def _payload_docs(self): "schema": "#/definitions/Person" } ]} + ``` """ return {} @@ -87,17 +104,22 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # Reference the collection by default - if self.collection: - return {"data": [ - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": f"#/definitions/{self.collection.__class__.__name__}" - } - ]} + return {"data": [ + { + "name": "name", + "in": "formData", + "description": "name of the pet", + "required": True, + "type": "string" + }, + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} class UpdateView(UpdateMixin, IDMixin, JSONView): From 3c2e1228deecfeadf2dcc35a5884c2fb39c3702e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:41 +0800 Subject: [PATCH 074/214] Updated the docstrings of _payload_docs --- pfunk/web/views/json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index d620469..885478b 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,7 +26,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view. + """ Used in custom defining payload parameters for the view in Swagger generation. Should return a dict that has the fields of a swagger parameter. If there is an error in the swagger, it will not be raised. 
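
The patches above settle the `_payload_docs()` contract that `swagger.py` consumes: a view returns a dict whose `data` list holds Swagger parameter definitions, and any entry carrying a `schema` key is emitted as a `$ref`-style body parameter (via `sw.SwagSchema`) instead of a plain typed field. A minimal sketch of a custom override follows; it is illustrative only — the `PersonCreateView` name is a stand-in rather than a class from these patches, and it is kept self-contained instead of subclassing the real `JSONView`:

```python
# Illustrative sketch (not part of the patch series): a stand-in view showing the
# dict shape that SwaggerDoc.get_operations() reads back from _payload_docs().
class PersonCreateView:
    def _payload_docs(self):
        return {"data": [
            {
                # Plain typed parameter -> built as sw.Parameter(_type=..., _in=...)
                "name": "nickname",
                "in": "formData",
                "description": "Display name for the person",
                "required": False,
                "type": "string"
            },
            {
                # Entry with "schema" -> built as sw.Parameter(schema=sw.SwagSchema(ref=...))
                "name": "body",
                "in": "body",
                "description": "Person object to add",
                "required": True,
                "schema": "#/definitions/Person"
            }
        ]}


if __name__ == "__main__":
    # Prints the parameter definitions exactly as the swagger generator would receive them.
    print(PersonCreateView()._payload_docs())
```
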
From 0d0b24bbdb06da2ed02220119bbb1e4313ba27da Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 075/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - pfunk/web/views/json.py | 27 +++++++++++---------------- poetry.lock | 2 +- pyproject.toml | 2 +- 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 33c393b..1a27812 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 885478b..b7138fb 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -104,22 +104,17 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - return {"data": [ - { - "name": "name", - "in": "formData", - "description": "name of the pet", - "required": True, - "type": "string" - }, - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" - } - ]} + # TODO: Get view's collection class name by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/Collection" + } + ]} class UpdateView(UpdateMixin, IDMixin, JSONView): diff --git a/poetry.lock b/poetry.lock index 81c654d..cafe82d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1342,7 +1342,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" +content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" [metadata.files] appnope = [ diff --git a/pyproject.toml b/pyproject.toml index 46062d0..5aaa2ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,8 +25,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" +swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 39d50b8365a4a851aab4dce042fd356db7e4de1b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 23 Jun 2022 15:13:06 +0800 Subject: [PATCH 076/214] Fixed calling of view's _payload_docs to pass the correct argument. 
Made Create and Update views have default reference to model --- pfunk/utils/swagger.py | 2 +- pfunk/web/views/json.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 1a27812..2820efe 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -212,7 +212,7 @@ def get_operations(self, col: Collection): params.append(path_params) # Acquire payload of the view from the View's `_payload_docs` - view_payload = view()._payload_docs() + view_payload = view(col)._payload_docs() # Construct payload for swagger generation if view_payload: diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index b7138fb..791af54 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -104,7 +104,7 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # TODO: Get view's collection class name by default + # Reference the collection by default if self.collection: return {"data": [ { @@ -112,7 +112,7 @@ def _payload_docs(self): "in": "body", "description": "Collection object to add", "required": True, - "schema": f"#/definitions/Collection" + "schema": f"#/definitions/{self.collection.__class__.__name__}" } ]} From 198dadc4b5bd863e89785e23f69bdfbaa77af097 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 7 Apr 2022 15:29:22 +0800 Subject: [PATCH 077/214] Added yaml validator and have it return specific errors --- pfunk/utils/swagger.py | 1 + poetry.lock | 40 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2820efe..adcbe9e 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -135,6 +135,7 @@ def write_to_yaml(self, dir=''): if not os.path.exists(f'{dir}swagger.yaml'): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) + return t.to_yaml() else: print( 'There is an existing swagger file. 
Kindly move/delete it to generate a new one.') diff --git a/poetry.lock b/poetry.lock index cafe82d..43564c8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -827,6 +827,38 @@ PyYAML = ">=5.1" [package.extras] requests = ["requests"] +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + [[package]] name = "packaging" version = "21.3" @@ -1779,6 +1811,14 @@ openapi-spec-validator = [ {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, ] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, From 150c916a98dcc1bc5acf32f507f6cfce970bf0aa Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 078/214] Made yaml validation to properly separate openapi errors and python errors. Refactored swagger.py to return the swagger file directory --- pfunk/utils/swagger.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index adcbe9e..2820efe 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -135,7 +135,6 @@ def write_to_yaml(self, dir=''): if not os.path.exists(f'{dir}swagger.yaml'): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: print( 'There is an existing swagger file. Kindly move/delete it to generate a new one.') From 4464f038ef58beb87329da88c10e56d6d33fff64 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 079/214] Finished create/update api from yaml. Added writing to config file if API is created. 
Added reading from config file if API is to be updated --- pfunk/utils/aws.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 13164f8..28449eb 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -111,7 +111,6 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - if response: write_to_config({'api': response}) return { From 461e73fbe7ead0e607bf002f2bc2701e50063e0f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 080/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 858dcc3..3f1a43c 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) From 5a1ae8e74e336b6029bf5d90fad671d63e71655f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 081/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2820efe..a1ff3ec 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -215,6 +216,7 @@ def get_operations(self, col: Collection): view_payload = view(col)._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): if field.get('schema'): From 
d2d054f88978115d29f292972e28df63f1388ca9 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 082/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ pfunk/utils/swagger.py | 1 - 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 3f1a43c..858dcc3 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) - - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) - - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) + + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) + + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a1ff3ec..50107e1 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,7 +216,6 @@ def get_operations(self, col: Collection): view_payload = view(col)._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): if field.get('schema'): From 88dd53b6bf4586ebf91821a777853e58989217a9 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 083/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 50107e1..2820efe 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods From c0d149b230c3b1381df9035b6cb54502e5f75d85 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 6 Sep 2022 15:21:16 +0800 Subject: [PATCH 084/214] Added github workflow --- .github/workflows/main.yml | 49 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 .github/workflows/main.yml diff --git 
a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..f4af893 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,49 @@ +# This is a basic workflow to help you get started with Actions + +name: Unittests + +# Controls when the action will run. +on: + # Triggers the workflow on push or pull request events but only for the master branch + push: + branches: [ master, develop ] + pull_request: + branches: [ master, develop ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + # This workflow contains a single job called "build" + build: + environment: Master + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + - run: touch .env + - run: docker-compose pull + + # In this step, this action saves a list of existing images, + # the cache is created without them in the post run. + # It also restores the cache if it exists. + - uses: satackey/action-docker-layer-caching@v0.0.11 + # Ignore the failure of a step and avoid terminating the job. + continue-on-error: true + + - run: docker-compose build + + # Runs a single command using the runners shell + - name: Run Unit Tests + run: docker-compose run web poetry run python -m unittest + - name: Build and publish to pypi + if: github.ref == 'refs/heads/master' + uses: JRubics/poetry-publish@v1.13 + with: + pypi_token: ${{ secrets.PYPI_TOKEN }} + ignore_dev_requirements: "yes" \ No newline at end of file From 3763c440dbc7c5bfc09fe76a18f0e318068e4bdc Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Tue, 13 Sep 2022 21:09:43 -0400 Subject: [PATCH 085/214] first stab at add forms and HTMLView, and removing circular imports --- README.md | 6 + pfunk/cli.py | 8 +- pfunk/collection.py | 36 +- .../{collections/user.py => collections.py} | 65 +- pfunk/contrib/auth/collections/common.py | 34 - pfunk/contrib/auth/collections/group.py | 16 - .../auth/{collections/__init__.py => key.py} | 0 .../auth/templates/auth/forgot_email.html | 13 + .../auth/forgot_email.txt} | 0 pfunk/contrib/auth/views.py | 6 +- pfunk/contrib/ecommerce/collections.py | 4 +- pfunk/contrib/ecommerce/views.py | 4 +- pfunk/contrib/email/base.py | 2 +- pfunk/contrib/email/dummy.py | 42 + pfunk/contrib/templates.py | 5 - pfunk/exceptions.py | 5 + pfunk/fields.py | 40 +- pfunk/test_resources/__init__.py | 0 .../test_resources/templates/house/list.html | 14 + .../templates/house/update.html | 14 + pfunk/testcase.py | 2 +- pfunk/tests/__init__.py | 7 +- pfunk/tests/test_auth.py | 6 +- pfunk/tests/test_crud.py | 4 +- pfunk/tests/test_deployment.py | 4 +- pfunk/tests/test_email.py | 4 +- pfunk/tests/test_jwt.py | 6 +- pfunk/tests/test_project.py | 4 +- pfunk/tests/test_web_crud.py | 8 +- ...rd.py => test_web_json_change_password.py} | 4 +- pfunk/tests/test_web_json_crud.py | 71 ++ ...rd.py => test_web_json_forgot_password.py} | 4 +- ...st_web_login.py => test_web_json_login.py} | 4 +- ..._web_signup.py => test_web_json_signup.py} | 6 +- ..._web_stripe.py => test_web_json_stripe.py} | 32 +- pfunk/utils/publishing.py | 6 +- pfunk/utils/templates.py | 21 + pfunk/web/forms/__init__.py | 0 pfunk/web/forms/collections.py | 68 ++ pfunk/web/forms/templates/forms/ul.html | 15 + pfunk/web/views/base.py | 
55 +- pfunk/web/views/html.py | 155 +++ pfunk/web/views/json.py | 17 +- poetry.lock | 929 +++--------------- pyproject.toml | 5 +- 45 files changed, 804 insertions(+), 947 deletions(-) rename pfunk/contrib/auth/{collections/user.py => collections.py} (82%) delete mode 100644 pfunk/contrib/auth/collections/common.py delete mode 100644 pfunk/contrib/auth/collections/group.py rename pfunk/contrib/auth/{collections/__init__.py => key.py} (100%) create mode 100644 pfunk/contrib/auth/templates/auth/forgot_email.html rename pfunk/contrib/auth/{collections/group_user.py => templates/auth/forgot_email.txt} (100%) create mode 100644 pfunk/contrib/email/dummy.py delete mode 100644 pfunk/contrib/templates.py create mode 100644 pfunk/test_resources/__init__.py create mode 100644 pfunk/test_resources/templates/house/list.html create mode 100644 pfunk/test_resources/templates/house/update.html rename pfunk/tests/{test_web_change_password.py => test_web_json_change_password.py} (95%) create mode 100644 pfunk/tests/test_web_json_crud.py rename pfunk/tests/{test_web_forgot_password.py => test_web_json_forgot_password.py} (95%) rename pfunk/tests/{test_web_login.py => test_web_json_login.py} (95%) rename pfunk/tests/{test_web_signup.py => test_web_json_signup.py} (91%) rename pfunk/tests/{test_web_stripe.py => test_web_json_stripe.py} (90%) create mode 100644 pfunk/utils/templates.py create mode 100644 pfunk/web/forms/__init__.py create mode 100644 pfunk/web/forms/collections.py create mode 100644 pfunk/web/forms/templates/forms/ul.html create mode 100644 pfunk/web/views/html.py diff --git a/README.md b/README.md index db4a26a..b0634bf 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Includes GraphQL and generic ABAC auth workflow integrations. - [Getting Started](#Getting-Started) - [Installation](#Installation) + - [Environment Variables](#environment-variables) - [Setup the Connection](#setup-the-connection) - [Define your Collections](#define-your-collections-collectionspy) - [Choose an Auth Workflow](#auth-workflows) @@ -37,6 +38,11 @@ Includes GraphQL and generic ABAC auth workflow integrations. ### Installation ```pip install pfunk``` +### Environment Variables + +- **FAUNA_SECRET** - Fauna admin or server key. 
+- **FAUNA_SCHEME** - (optional) HTTP scheme to use (default: https) +- ### Setup the Connection #### Using Environment Variables (Preferred Method) diff --git a/pfunk/cli.py b/pfunk/cli.py index 838684b..c23d6f1 100644 --- a/pfunk/cli.py +++ b/pfunk/cli.py @@ -8,13 +8,13 @@ from werkzeug.serving import run_simple from pfunk.client import FaunaClient, q -from pfunk.contrib.auth.collections import PermissionGroup +from pfunk.contrib.auth.key import PermissionGroup from pfunk.exceptions import DocNotFound from pfunk.template import wsgi_template, project_template, collections_templates, key_template from pfunk.utils.deploy import Deploy -Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) +Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) @click.group() @@ -191,7 +191,7 @@ def seed_keys(stage_name: str, config_path: str): """ config = load_config_file(config_path) - Key = import_util('pfunk.contrib.auth.collections.Key') + Key = import_util('pfunk.contrib.auth.key.Key') keys = Key.create_keys() name = config.get('name') keys_path = f'{name}/{stage_name}_keys.py' @@ -233,7 +233,7 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na """ config = load_config_file(config_path) secret = config['stages'][stage_name]['fauna_secret'] - User = import_util('pfunk.contrib.auth.collections.user.User') + User = import_util('pfunk.contrib.auth.collections.User') if not local_user: os.environ['FAUNA_SECRET'] = secret diff --git a/pfunk/collection.py b/pfunk/collection.py index 204aad2..8cb1b64 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -10,12 +10,14 @@ from pfunk.web.views.json import DetailView, CreateView, UpdateView, DeleteView, ListView from .client import q from .contrib.generic import GenericCreate, GenericDelete, GenericUpdate, AllFunction -from .exceptions import DocNotFound +from .exceptions import DocNotFound, NotUniqueError from .queryset import Queryset from .resources import Index __all__ = ['Enum', 'Collection'] +from .web.views.html import HTMLCreateView, HTMLUpdateView, HTMLListView, HTMLDeleteView, HTMLDetailView + class PFunkDeclaredVars(DeclaredVars): base_field_class = BaseProperty @@ -32,6 +34,8 @@ class Enum(Schema): def __unicode__(self): return self.name # pragma: no cover + def __str__(self): + return self.name # pragma: no cover class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass): """ @@ -54,7 +58,11 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass): use_crud_views: bool = True """Specifies whether to use the CRUD views.""" crud_views: list = [CreateView, UpdateView, ListView, DeleteView, DetailView] - """Specifies the base events used if the `use_base_events` variable is `True`""" + """Specifies the crud views used if the `use_crud_views` variable is `True`""" + use_crud_html_views = False + """Specifies whether to use the CRUD HTML views.""" + crud_html_views = [HTMLCreateView, HTMLUpdateView, HTMLListView, HTMLDeleteView, HTMLDetailView] + """Specifies the crud html views used if the `use_crud_html_views` variable is `True`""" require_auth: bool = True """Determines wheter to require authentication and authorization""" non_public_fields: list = [] @@ -70,6 +78,13 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass): 'collection_name'] """List of class variables that are not allowed a field names. 
""" + def __str__(self): + try: + return self.__unicode__() # pragma: no cover + except AttributeError: + return f"{self.__class__.__name__} object" # pragma: no cover + + def __init__(self, _ref: object = None, _lazied: bool = False, **kwargs) -> None: """ Args: @@ -92,6 +107,8 @@ def __init__(self, _ref: object = None, _lazied: bool = False, **kwargs) -> None self.collection_functions = set(self.collection_functions) if self.use_crud_views: self.collection_views.extend(self.crud_views) + if self.use_crud_html_views: + self.collection_views.extend(self.crud_html_views) self.collection_views = set(self.collection_views) if self.use_crud_functions: @@ -412,11 +429,16 @@ def save(self, _credentials=None, _token=None) -> None: if not self.ref: self.call_signals('pre_create_signals') data_dict, relational_data = self.get_data_dict(_credentials=_credentials) - resp = self.client(_token=_token).query( - q.create( - q.collection(self.get_collection_name()), - data_dict - )) + try: + resp = self.client(_token=_token).query( + q.create( + q.collection(self.get_collection_name()), + data_dict + )) + except BadRequest as e: + if 'instance not unique' in [i.code for i in e.errors]: + raise NotUniqueError(f"{self.get_collection_name()} document is not unique.") + self.ref = resp['ref'] self.call_signals('post_create_signals') else: diff --git a/pfunk/contrib/auth/collections/user.py b/pfunk/contrib/auth/collections.py similarity index 82% rename from pfunk/contrib/auth/collections/user.py rename to pfunk/contrib/auth/collections.py index 6fca5d3..39cbacb 100644 --- a/pfunk/contrib/auth/collections/user.py +++ b/pfunk/contrib/auth/collections.py @@ -5,15 +5,61 @@ from valley.exceptions import ValidationException from valley.utils import import_util +from pfunk import ReferenceField from pfunk.client import q from pfunk.collection import Collection, Enum -from pfunk.contrib.auth.collections import Key +from pfunk.contrib.auth.key import Key from pfunk.contrib.auth.resources import LoginUser, UpdatePassword, Public, UserRole, LogoutUser from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, \ UpdatePasswordView, ForgotPasswordView from pfunk.contrib.email.base import send_email from pfunk.exceptions import LoginFailed, DocNotFound from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField +from pfunk.fields import ListField +from pfunk.fields import SlugField + + +class UserGroups(Collection): + """ Many-to-many collection of the user-group relationship + + The native fauna-way of holding many-to-many relationship + is to only have the ID of the 2 object. Here in pfunk, we + leverage the flexibility of the collection to have another + field, which is `permissions`, this field holds the capablities + of a user, allowing us to add easier permission handling. + Instead of manually going to roles and adding individual + collections which can be painful in long term. 
+ + Attributes: + collection_name (str): + Name of the collection in Fauna + userID (str): + Fauna ref of user that is tied to the group + groupID (str): + Fauna ref of a collection that is tied with the user + permissions (str[]): + List of permissions, `['create', 'read', 'delete', 'write']` + """ + collection_name = 'users_groups' + userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) + groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + +class Group(Collection): + """ Group collection that the user belongs to """ + name = StringField(required=True) + slug = SlugField(unique=True, required=False) + users = ManyToManyField( + env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User'), + relation_name='users_groups') + + def __unicode__(self): + return self.name # pragma: no cover + AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) @@ -39,7 +85,7 @@ class BaseUser(Collection): collection_roles = [Public, UserRole] non_public_fields = ['groups'] use_email_verification = True - group_class = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) + group_class = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) # Views collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, ForgotPasswordChangeView] @@ -83,7 +129,6 @@ def logout(cls, _token=None): q.call("logout_user") ) - def permissions(self, _token=None): return [] @@ -146,10 +191,12 @@ def send_verification_email(self, from_email=None, verification_type='signup'): txt_template = 'auth/verification_email.txt' html_template = 'auth/verification_email.html' verification_key = self.verification_key + verification_link = f'https://{env("PROJECT_DOMAIN")}/{self.get_collection_name().lower()}/verify/{verification_key}' elif verification_type == 'forgot': txt_template = 'auth/forgot_email.txt' html_template = 'auth/forgot_email.html' verification_key = self.forgot_password_key + verification_link = f'https://{env("PROJECT_DOMAIN")}/{self.get_collection_name().lower()}/forgot-password/{verification_key}' try: send_email( txt_template=txt_template, @@ -159,7 +206,10 @@ def send_verification_email(self, from_email=None, verification_type='signup'): subject=f'{project_name} Email Verification', first_name=self.first_name, last_name=self.last_name, - verification_key=verification_key + verification_key=verification_key, + verification_type=verification_type, + verification_link=verification_link, + collection=self.get_collection_name().lower(), ) except Exception as e: import logging @@ -190,6 +240,7 @@ def signup(cls, _token=None, **kwargs): data.pop('groups') except KeyError: pass + cls.create(**data, _token=_token) @classmethod @@ -244,10 +295,10 @@ def __unicode__(self): class User(BaseUser): - user_group_class = import_util('pfunk.contrib.auth.collections.common.UserGroups') - group_class = import_util('pfunk.contrib.auth.collections.group.Group') + user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') + group_class = import_util('pfunk.contrib.auth.collections.Group') """ User that has permission capabilities. 
Extension of `BaseUser` """ - groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group'), 'users_groups') + groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'), 'users_groups') @classmethod def get_permissions(cls, ref, _token=None): diff --git a/pfunk/contrib/auth/collections/common.py b/pfunk/contrib/auth/collections/common.py deleted file mode 100644 index 2aa07df..0000000 --- a/pfunk/contrib/auth/collections/common.py +++ /dev/null @@ -1,34 +0,0 @@ -from envs import env - -from pfunk import ReferenceField, Collection -from pfunk.fields import ListField - - -class UserGroups(Collection): - """ Many-to-many collection of the user-group relationship - - The native fauna-way of holding many-to-many relationship - is to only have the ID of the 2 object. Here in pfunk, we - leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities - of a user, allowing us to add easier permission handling. - Instead of manually going to roles and adding individual - collections which can be painful in long term. - - Attributes: - collection_name (str): - Name of the collection in Fauna - userID (str): - Fauna ref of user that is tied to the group - groupID (str): - Fauna ref of a collection that is tied with the user - permissions (str[]): - List of permissions, `['create', 'read', 'delete', 'write']` - """ - collection_name = 'users_groups' - userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User')) - groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" diff --git a/pfunk/contrib/auth/collections/group.py b/pfunk/contrib/auth/collections/group.py deleted file mode 100644 index 4ae5a96..0000000 --- a/pfunk/contrib/auth/collections/group.py +++ /dev/null @@ -1,16 +0,0 @@ -from envs import env - -from pfunk.collection import Collection -from pfunk.fields import SlugField, ManyToManyField, StringField - - -class Group(Collection): - """ Group collection that the user belongs to """ - name = StringField(required=True) - slug = SlugField(unique=True, required=False) - users = ManyToManyField( - env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User'), - relation_name='users_groups') - - def __unicode__(self): - return self.name # pragma: no cover diff --git a/pfunk/contrib/auth/collections/__init__.py b/pfunk/contrib/auth/key.py similarity index 100% rename from pfunk/contrib/auth/collections/__init__.py rename to pfunk/contrib/auth/key.py diff --git a/pfunk/contrib/auth/templates/auth/forgot_email.html b/pfunk/contrib/auth/templates/auth/forgot_email.html new file mode 100644 index 0000000..9a55777 --- /dev/null +++ b/pfunk/contrib/auth/templates/auth/forgot_email.html @@ -0,0 +1,13 @@ +
+<body>
+<h3>Email Verification</h3>
+<p>First Name</p>
+<p>{{first_name}}</p>
+<p>Last Name</p>
+<p>{{last_name}}</p>
+<p>Email</p>
+<p>{{email}}</p>
+<p>Verification Link</p>
+<p>{{verification_link}}</p>
+</body>
+</html>
\ No newline at end of file diff --git a/pfunk/contrib/auth/collections/group_user.py b/pfunk/contrib/auth/templates/auth/forgot_email.txt similarity index 100% rename from pfunk/contrib/auth/collections/group_user.py rename to pfunk/contrib/auth/templates/auth/forgot_email.txt diff --git a/pfunk/contrib/auth/views.py b/pfunk/contrib/auth/views.py index ee3de70..01b6b86 100644 --- a/pfunk/contrib/auth/views.py +++ b/pfunk/contrib/auth/views.py @@ -109,7 +109,10 @@ def get_query(self): class ForgotPasswordChangeView(ActionMixin, JSONAuthView): - """ Accepts a hashed key from the forgot-password email, validates it if it matches the user's and change the password """ + """ + Accepts a hashed key from the forgot-password email, validates + it if it matches the user's and change the password + """ action = 'forgot-password' login_required = False http_methods = ['put'] @@ -122,5 +125,6 @@ def get_query(self): password=kwargs['password']) + class WebhookView(JSONView): pass diff --git a/pfunk/contrib/ecommerce/collections.py b/pfunk/contrib/ecommerce/collections.py index 7e00515..ddc415e 100644 --- a/pfunk/contrib/ecommerce/collections.py +++ b/pfunk/contrib/ecommerce/collections.py @@ -12,8 +12,8 @@ stripe.api_key = env('STRIPE_API_KEY') -User = import_util(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User')) -Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) +User = import_util(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) +Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) class StripePackage(Collection): diff --git a/pfunk/contrib/ecommerce/views.py b/pfunk/contrib/ecommerce/views.py index 9d74e0f..fb7f23d 100644 --- a/pfunk/contrib/ecommerce/views.py +++ b/pfunk/contrib/ecommerce/views.py @@ -40,7 +40,7 @@ class CheckoutView(DetailView): a base class. 
""" - def get_context_data(self, **kwargs): + def get_context(self, **kwargs): context = super().get_context_data(**kwargs) customer = self.collection.objects.get_or_create_customer( self.request.user) # `StripeCustomer` collection @@ -77,7 +77,7 @@ class CheckoutSuccessView(DetailView, ActionMixin): @classmethod def url(cls, collection): - return Rule(f'/{collection.get_class_name()}/{cls.action}//', endpoint=cls.as_view(collection), + return Rule(f'/json/{collection.get_class_name()}/{cls.action}//', endpoint=cls.as_view(collection), methods=cls.http_methods) def get_query(self, *args, **kwargs): diff --git a/pfunk/contrib/email/base.py b/pfunk/contrib/email/base.py index a5c87a8..7f35655 100644 --- a/pfunk/contrib/email/base.py +++ b/pfunk/contrib/email/base.py @@ -3,7 +3,7 @@ from envs import env from valley.utils import import_util -from pfunk.contrib.templates import temp_env +from pfunk.utils.templates import temp_env class EmailBackend(object): diff --git a/pfunk/contrib/email/dummy.py b/pfunk/contrib/email/dummy.py new file mode 100644 index 0000000..f542e90 --- /dev/null +++ b/pfunk/contrib/email/dummy.py @@ -0,0 +1,42 @@ +from pfunk.contrib.email.base import EmailBackend + + +class DummyBackend(EmailBackend): + """ + AWS SES email backend (https://aws.amazon.com/ses/) + """ + region_name = None + charset = "UTF-8" + + def send_email(self, subject: str, to_emails: list, html_template: str = None, txt_template: str = None, + from_email: str = None, cc_emails: list = [], bcc_emails: list = [], fail_silently: bool = True, + **kwargs): + """ + Sends email + Args: + subject: Email subject line + to_emails: List of email addresses + html_template: HTML template location string + txt_template: Text template location string + from_email: From email address + cc_emails: CC email addresses + bcc_emails: BCC email addresses + fail_silently: Specifies whether to fail silently + **kwargs: keyword arguments used to render template(s) + + Returns: None + + """ + email_dict = { + 'subject': subject, + 'to_emails': to_emails, + 'html_template': html_template, + 'txt_template': txt_template, + 'from_email': from_email, + 'cc_emails': cc_emails, + 'bcc_emails': bcc_emails, + 'fail_silently': fail_silently, + 'kwargs': kwargs, + 'body': self.get_body_kwargs(html_template=html_template, txt_template=txt_template, **kwargs) + } + return email_dict \ No newline at end of file diff --git a/pfunk/contrib/templates.py b/pfunk/contrib/templates.py deleted file mode 100644 index cbb4c9a..0000000 --- a/pfunk/contrib/templates.py +++ /dev/null @@ -1,5 +0,0 @@ -from envs import env -from jinja2 import Environment -from jinja2.loaders import FileSystemLoader - -temp_env = Environment(loader=FileSystemLoader(env('TEMPLATE_ROOT_DIR'))) diff --git a/pfunk/exceptions.py b/pfunk/exceptions.py index 9625b75..b91fbbe 100644 --- a/pfunk/exceptions.py +++ b/pfunk/exceptions.py @@ -21,3 +21,8 @@ class Unauthorized(Exception): class GraphQLError(Exception): """Graphql SyntaxError""" pass + + +class NotUniqueError(Exception): + """Exception raised when a unique constraint is violated""" + pass \ No newline at end of file diff --git a/pfunk/fields.py b/pfunk/fields.py index e376333..ddb1c1a 100644 --- a/pfunk/fields.py +++ b/pfunk/fields.py @@ -3,7 +3,7 @@ import pytz from valley.exceptions import ValidationException from valley.properties import CharProperty, IntegerProperty, DateTimeProperty, DateProperty, FloatProperty, \ - BooleanProperty, EmailProperty, SlugProperty, BaseProperty, ForeignProperty, ForeignListProperty, 
ListProperty + BooleanProperty, EmailProperty, SlugProperty, BaseProperty, ForeignProperty as FP, ForeignListProperty, ListProperty from valley.utils import import_util from valley.validators import ChoiceValidator, ForeignValidator @@ -40,6 +40,10 @@ def get_graphql_type(self): return f"{self.GRAPHQL_FIELD_TYPE}{req} {unique}" + def get_form_field(self, **kwargs): + return 'formy.fields.StringField' + + class StringField(GraphQLMixin, CharProperty): pass @@ -47,7 +51,8 @@ class StringField(GraphQLMixin, CharProperty): class IntegerField(GraphQLMixin, IntegerProperty): GRAPHQL_FIELD_TYPE = 'Int' - + def get_form_field(self, **kwargs): + return 'formy.fields.IntegerField' class DateTimeField(GraphQLMixin, DateTimeProperty): GRAPHQL_FIELD_TYPE = 'Time' @@ -55,17 +60,28 @@ class DateTimeField(GraphQLMixin, DateTimeProperty): def now(self): return datetime.datetime.now(tz=pytz.UTC) + def get_form_field(self, **kwargs): + return 'formy.fields.DateTimeField' + class FloatField(GraphQLMixin, FloatProperty): GRAPHQL_FIELD_TYPE = 'Float' + def get_form_field(self, **kwargs): + return 'formy.fields.FloatField' + + class BooleanField(GraphQLMixin, BooleanProperty): GRAPHQL_FIELD_TYPE = 'Boolean' + def get_form_field(self, **kwargs): + return 'formy.fields.BooleanField' + class EmailField(GraphQLMixin, EmailProperty): - pass + def get_form_field(self, **kwargs): + return 'formy.fields.EmailField' class SlugField(GraphQLMixin, SlugProperty): @@ -101,6 +117,9 @@ def get_graphql_type(self): unique = '@unique' return f"{self.enum.name}{req} {unique}" + def get_form_field(self, **kwargs): + return 'formy.fields.ChoiceField' + class ReferenceValidator(ForeignValidator): @@ -113,8 +132,20 @@ def validate(self, value, key): key, value, self.foreign_class.__name__)) +class ForeignProperty(FP): + + def __init__(self, foreign_class, return_type=None, return_prop=None, choices_index=None, **kwargs): + super(ForeignProperty, self).__init__( + foreign_class, return_type=return_type, return_prop=return_prop, + choices_index=choices_index, **kwargs) + self.choices_index = choices_index + + class ReferenceField(GraphQLMixin, ForeignProperty): + def get_form_field(self, **kwargs): + return 'formy.fields.ChoiceField' + def get_validators(self): super(BaseProperty, self).get_validators() self.validators.insert(0, ReferenceValidator(self.foreign_class)) @@ -210,6 +241,9 @@ class DateField(GraphQLMixin, DateProperty): def now(self): return datetime.datetime.now(tz=pytz.UTC).date() + def get_form_field(self, **kwargs): + return 'formy.fields.DateField' + class ListField(GraphQLMixin, ListProperty): GRAPHQL_FIELD_TYPE = '[String]' diff --git a/pfunk/test_resources/__init__.py b/pfunk/test_resources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pfunk/test_resources/templates/house/list.html b/pfunk/test_resources/templates/house/list.html new file mode 100644 index 0000000..dbd6f25 --- /dev/null +++ b/pfunk/test_resources/templates/house/list.html @@ -0,0 +1,14 @@ + + + + + Title + + +
+    {% for house in object_list %}
+    <li>{{ house.address }}</li>
+    {% endfor %}
+ + \ No newline at end of file diff --git a/pfunk/test_resources/templates/house/update.html b/pfunk/test_resources/templates/house/update.html new file mode 100644 index 0000000..dbd6f25 --- /dev/null +++ b/pfunk/test_resources/templates/house/update.html @@ -0,0 +1,14 @@ + + + + + Title + + +
+    {% for house in object_list %}
+    <li>{{ house.address }}</li>
+    {% endfor %}
+ + \ No newline at end of file diff --git a/pfunk/testcase.py b/pfunk/testcase.py index 054bbab..dac644e 100644 --- a/pfunk/testcase.py +++ b/pfunk/testcase.py @@ -58,7 +58,7 @@ def setUp(self) -> None: self.app = self.project.wsgi_app self.c = Client(self.app) os.environ.setdefault('KEY_MODULE', 'pfunk.tests.unittest_keys.KEYS') - Key = import_util('pfunk.contrib.auth.collections.Key') + Key = import_util('pfunk.contrib.auth.key.Key') keys = Key.create_keys() self.keys_path = 'pfunk/tests/unittest_keys.py' with open(self.keys_path, 'w+') as f: diff --git a/pfunk/tests/__init__.py b/pfunk/tests/__init__.py index 936292b..6002a22 100644 --- a/pfunk/tests/__init__.py +++ b/pfunk/tests/__init__.py @@ -14,6 +14,7 @@ class SimpleIndex(Index): class Sport(Collection): use_crud_functions = True + use_crud_html_views = True name = StringField(required=True) slug = SlugField() @@ -26,12 +27,13 @@ class Meta: class Person(Collection): collection_roles = [GenericGroupBasedRole] + use_crud_html_views = True verbose_plural_name = 'people' first_name = StringField(required=True) last_name = StringField(required=True) gender_pronoun = EnumField(GENDER_PRONOUN) sport = ReferenceField(Sport) - group = ReferenceField('pfunk.contrib.auth.collections.group.Group') + group = ReferenceField('pfunk.contrib.auth.collections.Group') def __unicode__(self): return f"{self.first_name} {self.last_name}" @@ -39,8 +41,9 @@ def __unicode__(self): class House(Collection): collection_roles = [GenericUserBasedRole] + use_crud_html_views = True address = StringField(required=True) - user = ReferenceField('pfunk.contrib.auth.collections.user.User') + user = ReferenceField('pfunk.contrib.auth.collections.User') def __unicode__(self): return self.address diff --git a/pfunk/tests/test_auth.py b/pfunk/tests/test_auth.py index afc8204..9162aa4 100644 --- a/pfunk/tests/test_auth.py +++ b/pfunk/tests/test_auth.py @@ -1,8 +1,8 @@ from faunadb.errors import PermissionDenied -from pfunk.contrib.auth.collections import PermissionGroup -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.key import PermissionGroup +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.exceptions import LoginFailed from pfunk.testcase import CollectionTestCase from pfunk.tests import Sport, Person, House diff --git a/pfunk/tests/test_crud.py b/pfunk/tests/test_crud.py index b7abc64..a960fc5 100644 --- a/pfunk/tests/test_crud.py +++ b/pfunk/tests/test_crud.py @@ -1,5 +1,5 @@ -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.testcase import CollectionTestCase diff --git a/pfunk/tests/test_deployment.py b/pfunk/tests/test_deployment.py index c938b29..b354061 100644 --- a/pfunk/tests/test_deployment.py +++ b/pfunk/tests/test_deployment.py @@ -1,6 +1,6 @@ from pfunk.client import q -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.project import Project from pfunk.testcase import PFunkTestCase from pfunk.tests import Sport, Person diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index af42f6d..27e899c 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ 
-4,8 +4,8 @@ from jinja2.exceptions import TemplateNotFound from werkzeug.test import Client -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.contrib.email.base import EmailBackend from pfunk.contrib.email.ses import SESBackend from pfunk.testcase import APITestCase diff --git a/pfunk/tests/test_jwt.py b/pfunk/tests/test_jwt.py index 571a907..15c3bdd 100644 --- a/pfunk/tests/test_jwt.py +++ b/pfunk/tests/test_jwt.py @@ -1,6 +1,6 @@ -from pfunk.contrib.auth.collections import Key -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.key import Key +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 858dcc3..2665808 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -5,8 +5,8 @@ from pfunk.cli import init from pfunk.project import Project from pfunk.tests import Person, Sport, GENDER_PRONOUN -from pfunk.contrib.auth.collections.user import User -from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group class ProjectTestCase(unittest.TestCase): diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py index 71123e6..8a1f963 100644 --- a/pfunk/tests/test_web_crud.py +++ b/pfunk/tests/test_web_crud.py @@ -1,7 +1,7 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.testcase import APITestCase from pfunk.tests import House @@ -31,7 +31,9 @@ def test_read_all(self): res = self.c.get(f'/house/list/', headers={ "Authorization": self.token}) - self.assertTrue(res.json['success']) + self.assertTrue(res.status_code, 200) + self.assertIn("test address", str(res.get_data())) + def test_create(self): self.assertNotIn("the street somewhere", [ diff --git a/pfunk/tests/test_web_change_password.py b/pfunk/tests/test_web_json_change_password.py similarity index 95% rename from pfunk/tests/test_web_change_password.py rename to pfunk/tests/test_web_json_change_password.py index bdc2d7a..6b9a9f0 100644 --- a/pfunk/tests/test_web_change_password.py +++ b/pfunk/tests/test_web_json_change_password.py @@ -1,5 +1,5 @@ -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.testcase import APITestCase diff --git a/pfunk/tests/test_web_json_crud.py b/pfunk/tests/test_web_json_crud.py new file mode 100644 index 0000000..48d77bf --- /dev/null +++ b/pfunk/tests/test_web_json_crud.py @@ -0,0 +1,71 @@ +from werkzeug.test import Client + +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User +from pfunk.testcase import APITestCase +from pfunk.tests import House + + +class TestWebCrud(APITestCase): + collections = [User, Group, House] + + def setUp(self) -> None: + super(TestWebCrud, self).setUp() + self.group = Group.create(name='Power 
Users', slug='power-users') + self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.token, self.exp = User.api_login("test", "abc123") + self.house = House.create(address="test address", user=self.user) + self.app = self.project.wsgi_app + self.c = Client(self.app) + + def test_read(self): + res = self.c.get(f'/json/house/detail/{self.house.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.json['success']) + self.assertEqual("test address", res.json['data']['data']['address']) + + def test_read_all(self): + res = self.c.get(f'/json/house/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.json['success']) + + def test_create(self): + self.assertNotIn("the street somewhere", [ + house.address for house in House.all()]) + res = self.c.post('/json/house/create/', + json={ + "address": "the street somewhere", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.json['success']) + self.assertIn("the street somewhere", [ + house.address for house in House.all()]) + + def test_update(self): + self.assertNotIn("the updated street somewhere", [ + house.address for house in House.all()]) + res = self.c.put(f'/json/house/update/{self.house.ref.id()}/', + json={ + "address": "the updated street somewhere", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.json['success']) + self.assertIn("the updated street somewhere", [ + house.address for house in House.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/house/delete/{self.house.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.json['success']) diff --git a/pfunk/tests/test_web_forgot_password.py b/pfunk/tests/test_web_json_forgot_password.py similarity index 95% rename from pfunk/tests/test_web_forgot_password.py rename to pfunk/tests/test_web_json_forgot_password.py index 83c3e32..64f8603 100644 --- a/pfunk/tests/test_web_forgot_password.py +++ b/pfunk/tests/test_web_json_forgot_password.py @@ -1,7 +1,7 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.testcase import APITestCase diff --git a/pfunk/tests/test_web_login.py b/pfunk/tests/test_web_json_login.py similarity index 95% rename from pfunk/tests/test_web_login.py rename to pfunk/tests/test_web_json_login.py index 4a895c6..f67ce73 100644 --- a/pfunk/tests/test_web_login.py +++ b/pfunk/tests/test_web_json_login.py @@ -1,7 +1,7 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase diff --git a/pfunk/tests/test_web_signup.py b/pfunk/tests/test_web_json_signup.py similarity index 91% rename from pfunk/tests/test_web_signup.py rename to pfunk/tests/test_web_json_signup.py index c651084..b175ee2 100644 --- a/pfunk/tests/test_web_signup.py +++ b/pfunk/tests/test_web_json_signup.py @@ -1,7 +1,7 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections.group 
import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User from pfunk.testcase import APITestCase @@ -47,4 +47,4 @@ def test_signup_not_unique(self): self.assertFalse(res.json['success']) self.assertEqual(res.status_code, 400) - self.assertEqual('document is not unique.', res.json['data']) + self.assertEqual('User document is not unique.', res.json['data']) diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_json_stripe.py similarity index 90% rename from pfunk/tests/test_web_stripe.py rename to pfunk/tests/test_web_json_stripe.py index 23d833a..1158a1d 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_json_stripe.py @@ -1,12 +1,11 @@ -import tempfile from types import SimpleNamespace from unittest import mock from werkzeug.test import Client -from pfunk.contrib.auth.collections import PermissionGroup -from pfunk.contrib.auth.collections.group import Group -from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections import Group +from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.key import PermissionGroup from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer from pfunk.contrib.ecommerce.views import BaseWebhookView from pfunk.testcase import APITestCase @@ -35,7 +34,7 @@ def setUp(self) -> None: StripePackage, ['create', 'read', 'write', 'delete'])]) def test_list_package(self): - res = self.c.get('/stripepackage/list/', headers={ + res = self.c.get('/json/stripepackage/list/', headers={ "Content-Type": "application/json" }) self.assertTrue(res.json['success']) @@ -44,7 +43,7 @@ def test_list_package(self): self.stripe_pkg.name) def test_get_package(self): - res = self.c.get(f'/stripepackage/detail/{self.stripe_pkg.ref.id()}/', headers={ + res = self.c.get(f'/json/stripepackage/detail/{self.stripe_pkg.ref.id()}/', headers={ "Content-Type": "application/json" }) self.assertTrue(res.json['success']) @@ -55,7 +54,7 @@ def test_get_package(self): def test_create_package(self): self.assertNotIn("new stripe pkg", [ pkg.name for pkg in StripePackage.all()]) - res = self.c.post('/stripepackage/create/', + res = self.c.post('/json/stripepackage/create/', json={ 'stripe_id': '123', 'name': 'new stripe pkg', @@ -75,7 +74,7 @@ def test_update_package(self): self.assertNotIn("updated pkg", [ pkg.name for pkg in StripePackage.all()]) updated_name = 'updated pkg' - res = self.c.put(f'/stripepackage/update/{self.stripe_pkg.ref.id()}/', + res = self.c.put(f'/json/stripepackage/update/{self.stripe_pkg.ref.id()}/', json={ 'stripe_id': '123', 'name': updated_name, @@ -93,7 +92,7 @@ def test_update_package(self): updated_name) def test_delete_package(self): - res = self.c.delete(f'/stripepackage/delete/{self.stripe_pkg.ref.id()}/', + res = self.c.delete(f'/json/stripepackage/delete/{self.stripe_pkg.ref.id()}/', headers={ "Authorization": self.token, "Content-Type": "application/json" @@ -109,7 +108,7 @@ def test_create_customer(self): stripe_id = '201' self.assertNotIn(stripe_id, [ cus.stripe_id for cus in StripeCustomer.all()]) - res = self.c.post(f'/stripecustomer/create/', + res = self.c.post(f'/json/stripecustomer/create/', json={ "user": self.user.ref.id(), "stripe_id": stripe_id @@ -124,7 +123,7 @@ def test_create_customer(self): cus.stripe_id for cus in StripeCustomer.all()]) def test_list_customers(self): - res = self.c.get('/stripecustomer/list/', headers={ + res = 
self.c.get('/json/stripecustomer/list/', headers={ "Authorization": self.token, "Content-Type": "application/json" }) @@ -135,7 +134,7 @@ def test_list_customers(self): '100') def test_get_customer(self): - res = self.c.get(f'/stripecustomer/detail/{self.stripe_cus.ref.id()}/', headers={ + res = self.c.get(f'/json/stripecustomer/detail/{self.stripe_cus.ref.id()}/', headers={ "Authorization": self.token, "Content-Type": "application/json" }) @@ -149,7 +148,7 @@ def test_update_customer(self): updated_stripe_id = '101' self.assertNotIn(updated_stripe_id, [ cus.stripe_id for cus in StripeCustomer.all()]) - res = self.c.put(f'/stripecustomer/update/{self.stripe_cus.ref.id()}/', + res = self.c.put(f'/json/stripecustomer/update/{self.stripe_cus.ref.id()}/', json={ "stripe_id": updated_stripe_id }, @@ -164,7 +163,7 @@ def test_update_customer(self): updated_stripe_id) def test_delete_customer(self): - res = self.c.delete(f'/stripecustomer/delete/{self.stripe_cus.ref.id()}/', + res = self.c.delete(f'/json/stripecustomer/delete/{self.stripe_cus.ref.id()}/', headers={ "Authorization": self.token, "Content-Type": "application/json" @@ -255,7 +254,7 @@ def test_get_transfer_data(self): def test_receive_post_req(self, mocked): with self.assertRaises(NotImplementedError): self.view.event = SimpleNamespace(**self.view.request.body) - res = self.c.post('/stripecustomer/webhook/', + res = self.c.post('/json/stripecustomer/webhook/', json=self.stripe_req_body, headers={ 'HTTP_STRIPE_SIGNATURE': 'sig_1113' @@ -281,9 +280,10 @@ def setUp(self) -> None: @mock.patch('stripe.checkout', spec=True) def test_checkout_success_view(self, mocked): session_id = 'session_123' - res = self.c.get(f'/stripepackage/checkout-success/{session_id}/', headers={ + res = self.c.get(f'/json/stripepackage/checkout-success/{session_id}/', headers={ 'Authorization': self.token, 'Content-Type': 'application/json' }) + self.assertTrue(True) self.assertDictEqual({'success': False, 'data': 'Not Found'}, res.json) diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py index 60633bd..9558a0e 100644 --- a/pfunk/utils/publishing.py +++ b/pfunk/utils/publishing.py @@ -1,3 +1,5 @@ +import logging + import requests from faunadb import query as q from faunadb.errors import BadRequest @@ -31,6 +33,7 @@ def create_or_update_role(client, payload: dict = {}): Returns: query """ + try: response = client.query( q.create_role(payload) @@ -60,13 +63,12 @@ def create_or_pass_index(client, payload): Returns: query """ + try: response = client.query( q.create_index(payload) ) except BadRequest as err: - print('Warning: You cannot update an index please delete the index and publish it again.') - print(err) return return response diff --git a/pfunk/utils/templates.py b/pfunk/utils/templates.py new file mode 100644 index 0000000..27aa90c --- /dev/null +++ b/pfunk/utils/templates.py @@ -0,0 +1,21 @@ +from envs import env +from jinja2 import Environment +from jinja2.loaders import ChoiceLoader, PackageLoader, FileSystemLoader + + +def get_loaders(): + """ + Get the Jinja2 loaders for the project. 
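# A minimal usage sketch (not part of the patch) of the new ChoiceLoader-based
# template environment, assuming TEMPLATE_ROOT_DIR points at the test templates
# added in this patch; 'house/list.html' and the object_list context variable come
# from pfunk/test_resources and HTMLListView.
import os

os.environ.setdefault('TEMPLATE_ROOT_DIR', 'pfunk/test_resources/templates')

from pfunk.utils.templates import temp_env

html = temp_env.get_template('house/list.html').render(object_list=[])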
+ Returns: list + """ + loaders = [ + FileSystemLoader(env('TEMPLATE_ROOT_DIR')), + PackageLoader('pfunk.contrib.auth'), + PackageLoader('pfunk.contrib.ecommerce'), + ] + for i in env('TEMPLATE_PACKAGES', [], var_type='list'): + loaders.append(PackageLoader(i)) + return loaders + + +temp_env = Environment(loader=ChoiceLoader(get_loaders())) diff --git a/pfunk/web/forms/__init__.py b/pfunk/web/forms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pfunk/web/forms/collections.py b/pfunk/web/forms/collections.py new file mode 100644 index 0000000..1f06704 --- /dev/null +++ b/pfunk/web/forms/collections.py @@ -0,0 +1,68 @@ +from formy import Form +from valley.utils import import_util + + +class CollectionForm(Form): + _template = 'forms/ul.html' + + def __init__(self, **kwargs): + super(CollectionForm, self).__init__(**kwargs) + self._instance = kwargs.get('_instance') + if self._instance: + self._data = self._instance.to_dict().get('data') + self.create_fields() + + + @classmethod + def add_field_choices(cls, class_name, field): + if class_name == 'EnumField': + choices = {item: item for item in field.choices} + else: + choices = {str(obj): obj.ref.id() for obj in cls.get_queryset( + field.get_foreign_class(), field.choices_index)} + return choices + + @classmethod + def get_queryset(cls, collection, index=None): + if not index: + return collection.all() + return collection.get_index(index) + + def add_field(self, name, field): + # We need to know the class name to determine the correct form field + class_name = field.__class__.__name__ + # We use the class name to get the correct form field class from the map + field_class = import_util(field.get_form_field()) + + if field_class: + field_kwargs = { + 'required': field.required, + } + if field.choices: + field_kwargs['choices'] = field.choices + if class_name in ['ReferenceField', 'ManyToManyField', 'EnumField']: + field_kwargs['choices'] = self.add_field_choices( + class_name, field) + if field.default_value: + field_kwargs['default_value'] = field.default + if self._data.get(name): + field_kwargs['value'] = self._data.get(name) + self._base_properties[name] = field_class(**field_kwargs) + + def create_fields(self): + if hasattr(self.Meta, 'fields') and len(self.Meta.fields) > 0: + for name in self.Meta.fields: + self.add_field(name, self.Meta.collection._base_properties[name]) + else: + try: + for name, field in self.Meta.collection._base_properties.items(): + self.add_field(name, field) + except TypeError: + pass + + def save(self): + return self.Meta.collection(**self._data).save() + + class Meta: + collection = None + fields = None diff --git a/pfunk/web/forms/templates/forms/ul.html b/pfunk/web/forms/templates/forms/ul.html new file mode 100644 index 0000000..0549589 --- /dev/null +++ b/pfunk/web/forms/templates/forms/ul.html @@ -0,0 +1,15 @@ +
+    {% for field in form %}
+    <li>
+        <label>
+            {{field.render(errors=form._errors, value=field.kwargs.get('value'))}}
+            {% if field.help_text %}{{field.help_text}}{% endif %}
+        </label>
+    </li>
+    {% endfor %}
+    {% if include_submit %}
+    <li>
+        <input type="submit" value="Submit">
+    </li>
+    {% endif %}
+</ul>
\ No newline at end of file diff --git a/pfunk/web/views/base.py b/pfunk/web/views/base.py index cb6c63d..00f43ff 100644 --- a/pfunk/web/views/base.py +++ b/pfunk/web/views/base.py @@ -7,7 +7,8 @@ from werkzeug.http import dump_cookie from werkzeug.routing import Rule -from pfunk.exceptions import TokenValidationFailed, LoginFailed, Unauthorized, DocNotFound, GraphQLError +from pfunk.exceptions import TokenValidationFailed, LoginFailed, Unauthorized, DocNotFound, GraphQLError, NotUniqueError +from pfunk.web.forms.collections import CollectionForm from pfunk.web.request import Request, RESTRequest, HTTPRequest from pfunk.web.response import (Response, HttpNotFoundResponse, HttpForbiddenResponse, HttpBadRequestResponse, HttpMethodNotAllowedResponse, HttpUnauthorizedResponse) @@ -88,7 +89,7 @@ def process_lambda_request(self): response = self.not_found_class().response except PermissionDenied: response = self.forbidden_class().response - except (BadRequest, GraphQLError) as e: + except (BadRequest, NotUniqueError, GraphQLError) as e: if isinstance(e, BadRequest): payload = e._get_description() else: @@ -124,7 +125,7 @@ def process_wsgi_request(self): response = self.not_found_class() except PermissionDenied: response = self.forbidden_class() - except (BadRequest, GraphQLError) as e: + except (BadRequest, NotUniqueError, GraphQLError) as e: if isinstance(e, BadRequest): payload = e._get_description() else: @@ -154,10 +155,10 @@ def get_token(self): returns the decrypted token Returns: - token (`contrib.auth.collections.Key`, required): token of Fauna + token (`contrib.auth.key.Key`, required): token of Fauna """ - from pfunk.contrib.auth.collections import Key + from pfunk.contrib.auth.key import Key enc_token = self.request.cookies.get(env('TOKEN_COOKIE_NAME', 'tk')) if not enc_token: @@ -351,6 +352,25 @@ def get_query_kwargs(self): class UpdateMixin(object): """ Generic PUT mixin for a fauna object """ + form_class = None + + def get_form_class(self): + """ Acquires or builds the form class to use for updating the object """ + if self.form_class: + return self.form_class + return self.build_form_class() + + def build_form_class(self): + """ Builds the form class to use for updating the object """ + + class Meta: + collection = self.collection + + form_class = type(f"{self.get_collection_name()}Form", (CollectionForm,), { + # constructor + + "Meta": Meta, + }) def get_query_kwargs(self): @@ -376,6 +396,7 @@ class ActionMixin(object): action of the endpoint """ action: str + template_name = '{collection}/{action}.html' @classmethod def url(cls, collection): @@ -383,6 +404,21 @@ def url(cls, collection): methods=cls.http_methods) +class JSONActionMixin(ActionMixin): + """ Mixin for specifying what action should an endpoint have + + Attributes: + action (str, required): + action of the endpoint + """ + action: str + + @classmethod + def url(cls, collection): + return Rule(f'/json/{collection.get_class_name()}/{cls.action}/', endpoint=cls.as_view(collection), + methods=cls.http_methods) + + class IDMixin(ActionMixin): """ Mixin for specifying a URL that accepts an ID """ @@ -390,3 +426,12 @@ class IDMixin(ActionMixin): def url(cls, collection): return Rule(f'/{collection.get_class_name()}/{cls.action}//', endpoint=cls.as_view(collection), methods=cls.http_methods) + + +class JSONIDMixin(ActionMixin): + """ Mixin for specifying a URL that accepts an ID """ + + @classmethod + def url(cls, collection): + return Rule(f'/json/{collection.get_class_name()}/{cls.action}//', 
endpoint=cls.as_view(collection), + methods=cls.http_methods) diff --git a/pfunk/web/views/html.py b/pfunk/web/views/html.py new file mode 100644 index 0000000..a9ba9d0 --- /dev/null +++ b/pfunk/web/views/html.py @@ -0,0 +1,155 @@ +from pfunk.client import q +from pfunk.utils.templates import temp_env +from pfunk.web.response import Response, HttpNotFoundResponse, HttpBadRequestResponse, HttpMethodNotAllowedResponse, \ + HttpUnauthorizedResponse, HttpForbiddenResponse +from pfunk.web.views.base import UpdateMixin, ActionMixin, IDMixin, ObjectMixin, QuerysetMixin, RESTView + + +class HTMLView(RESTView): + """ + Base class for all HTML views + """ + response_class = Response + content_type_accepted = 'text/html' + restrict_content_type = False + not_found_class = HttpNotFoundResponse + bad_request_class = HttpBadRequestResponse + method_not_allowed_class = HttpMethodNotAllowedResponse + unauthorized_class = HttpUnauthorizedResponse + forbidden_class = HttpForbiddenResponse + template_name = None + + def get_template(self): + return temp_env.get_template( + self.template_name.format( + collection=self.collection.get_collection_name().lower(), + action=self.action + ) + ) + + def get_response(self): + return self.response_class( + payload=self.get_template().render(**self.get_context()), + headers=self.get_headers() + ) + + +class HTMLCreateView(UpdateMixin, ActionMixin, HTMLView): + """ + Define a `Create` view that allows `creation` of an entity in the collection + """ + action = 'create' + http_methods = ['post'] + login_required = True + + def get_query(self): + """ Entity created in a collection """ + obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) + return obj + + def get_m2m_kwargs(self, obj): + """ Acquires the keyword-arguments for the many-to-many relationship + + FaunaDB is only able to create a many-to-many relationship + by creating a collection that references both of the object. + So, when creating an entity, it is needed to create an entity to + make them related to each other. + + Args: + obj (dict, required): + + """ + data = self.request.get_json() + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + current_value = data.get(k) + col = v.get('foreign_class')() + client = col().client() + client.query( + q.create( + + ) + ) + + +class HTMLUpdateView(UpdateMixin, IDMixin, HTMLView): + """ + Define a view to allow `Update` operations + """ + action = 'update' + http_methods = ['put'] + login_required = True + + def get_query(self): + """ Entity updated in a collection """ + obj = self.collection.update(**self.get_query_kwargs(), _token=self.request.token) + return obj + + def get_m2m_kwargs(self, obj): + """ Acquires the keyword-arguments for the many-to-many relationship + + FaunaDB is only able to create a many-to-many relationship + by creating a collection that references both of the object. + So, when creating an entity, it is needed to create an entity to + make them related to each other. 
+ + Args: + obj (dict, required): + + """ + data = self.request.get_json() + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + current_value = data.get(k) + col = v.get('foreign_class')() + client = col().client() + client.query( + q.create( + data={ + '_class': col.get_class_name(), + '_ref': obj['_ref'] + } + ) + ) + + +class HTMLDetailView(ObjectMixin, IDMixin, HTMLView): + """ Define a view to allow single entity operations """ + action = 'detail' + restrict_content_type = False + login_required = True + + def get_context(self): + """ Context for the view """ + context = super(HTMLDetailView, self).get_context() + context['object'] = self.get_query() + return context + + +class HTMLDeleteView(ObjectMixin, IDMixin, HTMLView): + """ Define a view to allow `Delete` entity operations """ + action = 'delete' + http_methods = ['delete'] + login_required = True + + def get_query(self): + """ Deleted an entity in the specified collection """ + return self.collection.delete_from_id(self.request.kwargs.get('id'), _token=self.request.token) + + def get_context(self): + """ Context for the view """ + context = super(HTMLDeleteView, self).get_context() + context['object'] = self.get_query() + return context + +class HTMLListView(QuerysetMixin, ActionMixin, HTMLView): + """ Define a view to allow `All/List` entity operations """ + restrict_content_type = False + action = 'list' + login_required = True + + def get_context(self): + """ Context for the view """ + context = super(HTMLListView, self).get_context() + context['object_list'] = self.get_query() + return context \ No newline at end of file diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index ac2e994..93b957a 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -1,7 +1,8 @@ from pfunk.client import q from pfunk.web.response import JSONResponse, JSONNotFoundResponse, JSONBadRequestResponse, \ JSONMethodNotAllowedResponse, JSONUnauthorizedResponse, JSONForbiddenResponse -from pfunk.web.views.base import ActionMixin, HTTPView, IDMixin, ObjectMixin, QuerysetMixin, UpdateMixin +from pfunk.web.views.base import HTTPView, ObjectMixin, QuerysetMixin, UpdateMixin, \ + JSONActionMixin, JSONIDMixin class JSONView(HTTPView): @@ -26,7 +27,7 @@ def get_response(self): ) -class CreateView(UpdateMixin, ActionMixin, JSONView): +class CreateView(UpdateMixin, JSONActionMixin, JSONView): """ Define a `Create` view that allows `creation` of an entity in the collection """ action = 'create' http_methods = ['post'] @@ -62,7 +63,7 @@ def get_m2m_kwargs(self, obj): ) -class UpdateView(UpdateMixin, IDMixin, JSONView): +class UpdateView(UpdateMixin, JSONIDMixin, JSONView): """ Define a view to allow `Update` operations """ action = 'update' http_methods = ['put'] @@ -76,14 +77,14 @@ def get_query(self): return obj -class DetailView(ObjectMixin, IDMixin, JSONView): +class DetailView(ObjectMixin, JSONIDMixin, JSONView): """ Define a view to allow single entity operations """ action = 'detail' restrict_content_type = False login_required = True -class DeleteView(ObjectMixin, IDMixin, JSONView): +class DeleteView(ObjectMixin, JSONIDMixin, JSONView): """ Define a view to allow `Delete` entity operations """ action = 'delete' http_methods = ['delete'] @@ -94,12 +95,8 @@ def get_query(self): return self.collection.delete_from_id(self.request.kwargs.get('id'), _token=self.request.token) -class ListView(QuerysetMixin, ActionMixin, JSONView): +class ListView(QuerysetMixin, 
JSONActionMixin, JSONView): """ Define a view to allow `All/List` entity operations """ restrict_content_type = False action = 'list' login_required = True - - -class GraphQLView(HTTPView): - pass diff --git a/poetry.lock b/poetry.lock index 8121a5c..333249c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -272,7 +272,7 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] +cli = ["terminaltables[cli] (>=3.1.10,<4.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "click[cli] (>=8.0.3,<9.0.0)"] [[package]] name = "executing" @@ -311,6 +311,18 @@ requests = "*" lint = ["pylint"] test = ["nose2", "nose2"] +[[package]] +name = "formy" +version = "1.3.1" +description = "Valley is a Python forms library that allows you to use Jinja2 templates to create and manage the HTML of your forms." +category = "main" +optional = false +python-versions = ">=3.8,<4.0" + +[package.dependencies] +Jinja2 = ">=3.1.2,<4.0.0" +valley = ">=1.5.8,<2.0.0" + [[package]] name = "future" version = "0.18.2" @@ -507,11 +519,11 @@ testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" -version = "3.0.1" +version = "3.1.2" description = "A very fast and expressive template engine." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] MarkupSafe = ">=2.0" @@ -672,8 +684,8 @@ nest-asyncio = "*" traitlets = ">=5.0.0" [package.extras] -sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] -test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] +test = ["xmltodict", "twine (>=1.11.0)", "testpath", "setuptools (>=60.0)", "pytest-cov (>=2.6.1)", "pytest-asyncio", "pytest (>=4.1)", "pre-commit", "pip (>=18.1)", "mypy", "ipywidgets (<8.0.0)", "ipython (<8.0.0)", "ipykernel", "flake8", "check-manifest", "black"] +sphinx = ["sphinx-book-theme", "Sphinx (>=1.7)", "myst-parser", "moto", "mock", "autodoc-traits"] [[package]] name = "nbconvert" @@ -1036,7 +1048,7 @@ python-versions = ">=3.7" packaging = "*" [package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] +test = ["pytest-qt", "pytest-cov (>=3.0.0)", "pytest (>=6,!=7.0.0,!=7.0.1)"] [[package]] name = "requests" @@ -1126,7 +1138,7 @@ executing = "*" pure-eval = "*" [package.extras] -tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] +tests = ["cython", "littleutils", "pygments", "typeguard", "pytest"] [[package]] name = "stripe" @@ -1179,8 +1191,8 @@ python-versions = ">=3.6" webencodings = ">=0.4" [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] +test = ["coverage", "pytest-isort", "pytest-flake8", "pytest-cov", "pytest"] +doc = ["sphinx-rtd-theme", "sphinx"] [[package]] name = "tornado" @@ -1216,7 +1228,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "valley" -version = "1.5.6" +version = "1.5.8" description = "Python extensible schema validations and declarative syntax helpers." 
category = "main" optional = false @@ -1278,796 +1290,111 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "45c80cdba27ede0a7a28b611355294d4166ccfd7d4209b7fb6b75582d854b5a7" +content-hash = "324e8aabc4bf0157d9126fe65c85ec227eba1c16ced513b206ca6f02129494d9" [metadata.files] -appnope = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] -argon2-cffi = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] -argon2-cffi-bindings = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = 
"argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] -asttokens = [ - {file = "asttokens-2.0.5-py2.py3-none-any.whl", hash = "sha256:0844691e88552595a6f4a4281a9f7f79b8dd45ca4ccea82e5e05b4bbdb76705c"}, - {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, -] -astunparse = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] -bleach = [ - {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, - {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, -] -boto3 = [ - {file = "boto3-1.23.8-py3-none-any.whl", hash = "sha256:15733c2bbedce7a36fcf1749560c72c3ee90785aa6302a98658c7bffdcbe1f2a"}, - {file = "boto3-1.23.8.tar.gz", hash = "sha256:ea8ebcea4ccb70d1cf57526d9eec6012c76796f28ada3e9cc1d89178683d8107"}, -] -botocore = [ - {file = "botocore-1.26.8-py3-none-any.whl", hash = "sha256:620851daf1245af5bc28137aa821375bac964aa0eddc482437c783fe01e298fc"}, - {file = "botocore-1.26.8.tar.gz", hash = "sha256:e786722cb14de7319331cc55e9092174de66a768559700ef656d05ff41b3e24f"}, -] -cachetools = [ - {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, - {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, -] -certifi = [ - {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = 
"sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, - {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, -] -cffi = [ - {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, - {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, - {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, - {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, - {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, - {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, - {file = 
"cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, - {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, - {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, - {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, - {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, - {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, - {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, - {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, - {file = 
"cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, - {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, - {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, - {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = 
"coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, -] -cryptography = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, -] -debugpy = [ - {file = "debugpy-1.6.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:eb1946efac0c0c3d411cea0b5ac772fbde744109fd9520fb0c5a51979faf05ad"}, - {file = "debugpy-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e3513399177dd37af4c1332df52da5da1d0c387e5927dc4c0709e26ee7302e8f"}, - {file = "debugpy-1.6.0-cp310-cp310-win32.whl", hash = "sha256:5c492235d6b68f879df3bdbdb01f25c15be15682665517c2c7d0420e5658d71f"}, - {file = "debugpy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:40de9ba137d355538432209d05e0f5fe5d0498dce761c39119ad4b950b51db31"}, - {file = "debugpy-1.6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0d383b91efee57dbb923ba20801130cf60450a0eda60bce25bccd937de8e323a"}, - {file = "debugpy-1.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ff853e60e77e1c16f85a31adb8360bb2d98ca588d7ed645b7f0985b240bdb5e"}, - {file = "debugpy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:8e972c717d95f56b6a3a7a29a5ede1ee8f2c3802f6f0e678203b0778eb322bf1"}, - {file = "debugpy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a8aaeb53e87225141fda7b9081bd87155c1debc13e2f5a532d341112d1983b65"}, - {file = "debugpy-1.6.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:132defb585b518955358321d0f42f6aa815aa15b432be27db654807707c70b2f"}, - {file = "debugpy-1.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ee75844242b4537beb5899f3e60a578454d1f136b99e8d57ac424573797b94a"}, - {file = "debugpy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:a65a2499761d47df3e9ea9567109be6e73d412e00ac3ffcf74839f3ddfcdf028"}, - {file = "debugpy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:bd980d533d0ddfc451e03a3bb32acb2900049fec39afc3425b944ebf0889be62"}, - {file = "debugpy-1.6.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:245c7789a012f86210847ec7ee9f38c30a30d4c2223c3e111829a76c9006a5d0"}, - {file = "debugpy-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e3aa2368883e83e7b689ddff3cafb595f7b711f6a065886b46a96a7fef874e7"}, - {file = "debugpy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:72bcfa97f3afa0064afc77ab811f48ad4a06ac330f290b675082c24437730366"}, - {file = 
"debugpy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:30abefefd2ff5a5481162d613cb70e60e2fa80a5eb4c994717c0f008ed25d2e1"}, - {file = "debugpy-1.6.0-py2.py3-none-any.whl", hash = "sha256:4de7777842da7e08652f2776c552070bbdd758557fdec73a15d7be0e4aab95ce"}, - {file = "debugpy-1.6.0.zip", hash = "sha256:7b79c40852991f7b6c3ea65845ed0f5f6b731c37f4f9ad9c61e2ab4bd48a9275"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -defusedxml = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] -entrypoints = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] -envs = [ - {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, - {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, -] -executing = [ - {file = "executing-0.8.3-py2.py3-none-any.whl", hash = "sha256:d1eef132db1b83649a3905ca6dd8897f71ac6f8cac79a7e58a1a09cf137546c9"}, - {file = "executing-0.8.3.tar.gz", hash = "sha256:c6554e21c6b060590a6d3be4b82fb78f8f0194d809de5ea7df1c093763311501"}, -] -fastjsonschema = [ - {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, - {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, -] -faunadb = [ - {file = "faunadb-4.2.0-py2.py3-none-any.whl", hash = "sha256:73d5f560bddb7fc45f9201d526f97fcd0a7b0ef74ce3d4d46b8e116f4ce6e219"}, -] -future = [ - {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, -] -graphql-py = [ - {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"}, -] -h2 = [ - {file = "h2-2.6.2-py2.py3-none-any.whl", hash = "sha256:93cbd1013a2218539af05cdf9fc37b786655b93bbc94f5296b7dabd1c5cadf41"}, - {file = "h2-2.6.2.tar.gz", hash = "sha256:af35878673c83a44afbc12b13ac91a489da2819b5dc1e11768f3c2406f740fe9"}, -] -hpack = [ - {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"}, - {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"}, -] -hyper = [ - {file = "hyper-0.7.0-py2.py3-none-any.whl", hash = "sha256:069514f54231fb7b5df2fb910a114663a83306d5296f588fffcb0a9be19407fc"}, - {file = "hyper-0.7.0.tar.gz", hash = "sha256:12c82eacd122a659673484c1ea0d34576430afbe5aa6b8f63fe37fcb06a2458c"}, -] -hyperframe = [ - {file = "hyperframe-3.2.0-py2.py3-none-any.whl", hash = "sha256:4dcab11967482d400853b396d042038e4c492a15a5d2f57259e2b5f89a32f755"}, - {file = "hyperframe-3.2.0.tar.gz", hash = "sha256:05f0e063e117c16fcdd13c12c93a4424a2c40668abfac3bb419a10f57698204e"}, -] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = 
"sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -importlib-resources = [ - {file = "importlib_resources-5.7.1-py3-none-any.whl", hash = "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"}, - {file = "importlib_resources-5.7.1.tar.gz", hash = "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3"}, -] -ipykernel = [ - {file = "ipykernel-6.13.0-py3-none-any.whl", hash = "sha256:2b0987af43c0d4b62cecb13c592755f599f96f29aafe36c01731aaa96df30d39"}, - {file = "ipykernel-6.13.0.tar.gz", hash = "sha256:0e28273e290858393e86e152b104e5506a79c13d25b951ac6eca220051b4be60"}, -] -ipython = [ - {file = "ipython-8.3.0-py3-none-any.whl", hash = "sha256:341456643a764c28f670409bbd5d2518f9b82c013441084ff2c2fc999698f83b"}, - {file = "ipython-8.3.0.tar.gz", hash = "sha256:807ae3cf43b84693c9272f70368440a9a7eaa2e7e6882dad943c32fbf7e51402"}, -] -ipython-genutils = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] -ipywidgets = [ - {file = "ipywidgets-7.7.0-py2.py3-none-any.whl", hash = "sha256:e58ff58bc94d481e91ecb6e13a5cb96a87b6b8ade135e055603d0ca24593df38"}, - {file = "ipywidgets-7.7.0.tar.gz", hash = "sha256:ab4a5596855a88b83761921c768707d65e5847068139bc1729ddfe834703542a"}, -] -iso8601 = [ - {file = "iso8601-1.0.2-py3-none-any.whl", hash = "sha256:d7bc01b1c2a43b259570bb307f057abc578786ea734ba2b87b836c5efc5bd443"}, - {file = "iso8601-1.0.2.tar.gz", hash = "sha256:27f503220e6845d9db954fb212b95b0362d8b7e6c1b2326a87061c3de93594b1"}, -] -jedi = [ - {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, - {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, -] -jinja2 = [ - {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, - {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, -] -jmespath = [ - {file = "jmespath-1.0.0-py3-none-any.whl", hash = "sha256:e8dcd576ed616f14ec02eed0005c85973b5890083313860136657e24784e4c04"}, - {file = "jmespath-1.0.0.tar.gz", hash = "sha256:a490e280edd1f57d6de88636992d05b71e97d69a26a19f058ecf7d304474bf5e"}, -] -jsonschema = [ - {file = "jsonschema-4.5.1-py3-none-any.whl", hash = "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f"}, - {file = "jsonschema-4.5.1.tar.gz", hash = "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc"}, -] -jupyter = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, -] -jupyter-client = [ - {file = "jupyter_client-7.3.1-py3-none-any.whl", hash = "sha256:404abe552540aff3527e66e16beb114b6b4ff58479d51a301f4eb9701e4f52ef"}, - {file = "jupyter_client-7.3.1.tar.gz", hash = "sha256:05d4ff6a0ade25138c6bb0fbeac7ddc26b5fe835e7dd816b64b4a45b931bdc0b"}, -] 
-jupyter-console = [ - {file = "jupyter_console-6.4.3-py3-none-any.whl", hash = "sha256:e630bcb682c0088dda45688ad7c2424d4a825c8acf494cb036ced03ed0424841"}, - {file = "jupyter_console-6.4.3.tar.gz", hash = "sha256:55f32626b0be647a85e3217ddcdb22db69efc79e8b403b9771eb9ecc696019b5"}, -] -jupyter-core = [ - {file = "jupyter_core-4.10.0-py3-none-any.whl", hash = "sha256:e7f5212177af7ab34179690140f188aa9bf3d322d8155ed972cbded19f55b6f3"}, - {file = "jupyter_core-4.10.0.tar.gz", hash = "sha256:a6de44b16b7b31d7271130c71a6792c4040f077011961138afed5e5e73181aec"}, -] -jupyterlab-pygments = [ - {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, - {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, -] -jupyterlab-widgets = [ - {file = "jupyterlab_widgets-1.1.0-py3-none-any.whl", hash = "sha256:c2a9bd3789f120f64d73268c066ed3b000c56bc1dda217be5cdc43e7b4ebad3f"}, - {file = "jupyterlab_widgets-1.1.0.tar.gz", hash = "sha256:d5f41bc1713795385f718d44dcba47e1e1473c6289f28a95aa6b2c0782ee372a"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = 
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -matplotlib-inline = [ - {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, - {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, -] -mistune = [ - {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, - {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, -] -nbclient = [ - {file = "nbclient-0.6.3-py3-none-any.whl", hash = "sha256:2747ac9b385720d8a6c34f2f71e72cbe64aec6cadaadcc064a4df0b0e99c5874"}, - {file = "nbclient-0.6.3.tar.gz", hash = "sha256:b80726fc1fb89a0e8f8be1e77e28d0026b1e8ed90bc143c8a0c7622e4f8cdd9e"}, -] -nbconvert = [ - {file = "nbconvert-6.5.0-py3-none-any.whl", hash = "sha256:c56dd0b8978a1811a5654f74c727ff16ca87dd5a43abd435a1c49b840fcd8360"}, - {file = "nbconvert-6.5.0.tar.gz", hash = "sha256:223e46e27abe8596b8aed54301fadbba433b7ffea8196a68fd7b1ff509eee99d"}, -] -nbformat = [ - {file = "nbformat-5.4.0-py3-none-any.whl", hash = "sha256:0d6072aaec95dddc39735c144ee8bbc6589c383fb462e4058abc855348152dad"}, - {file = "nbformat-5.4.0.tar.gz", hash = "sha256:44ba5ca6acb80c5d5a500f1e5b83ede8cbe364d5a495c4c8cf60aaf1ba656501"}, -] -nest-asyncio = [ - {file = "nest_asyncio-1.5.5-py3-none-any.whl", hash = "sha256:b98e3ec1b246135e4642eceffa5a6c23a3ab12c82ff816a92c612d68205813b2"}, - {file = "nest_asyncio-1.5.5.tar.gz", hash = "sha256:e442291cd942698be619823a17a86a5759eabe1f8613084790de189fe9e16d65"}, -] -notebook = [ - {file = "notebook-6.4.11-py3-none-any.whl", hash = "sha256:b4a6baf2eba21ce67a0ca11a793d1781b06b8078f34d06c710742e55f3eee505"}, - {file = "notebook-6.4.11.tar.gz", hash = "sha256:709b1856a564fe53054796c80e17a67262071c86bfbdfa6b96aaa346113c555a"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pandocfilters = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, -] -parso = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] -pdoc = [ - {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = 
"sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] -ply = [ - {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, - {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, -] -prometheus-client = [ - {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, - {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, - {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, -] -psutil = [ - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, - {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, - {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, - {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, - {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, - {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, - {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, - {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, - {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, - {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, - {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, - {file = 
"psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, - {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, - {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, - {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, - {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, - {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, - {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, - {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, - {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, - {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -pure-eval = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = 
"sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, -] -pyjwt = [ - {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, - {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, -] -pywin32 = [ - {file = "pywin32-304-cp310-cp310-win32.whl", hash = "sha256:3c7bacf5e24298c86314f03fa20e16558a4e4138fc34615d7de4070c23e65af3"}, - {file = "pywin32-304-cp310-cp310-win_amd64.whl", hash = "sha256:4f32145913a2447736dad62495199a8e280a77a0ca662daa2332acf849f0be48"}, - {file = "pywin32-304-cp310-cp310-win_arm64.whl", hash = "sha256:d3ee45adff48e0551d1aa60d2ec066fec006083b791f5c3527c40cd8aefac71f"}, - {file = "pywin32-304-cp311-cp311-win32.whl", hash = "sha256:30c53d6ce44c12a316a06c153ea74152d3b1342610f1b99d40ba2795e5af0269"}, - {file = "pywin32-304-cp311-cp311-win_amd64.whl", hash = "sha256:7ffa0c0fa4ae4077e8b8aa73800540ef8c24530057768c3ac57c609f99a14fd4"}, - {file = "pywin32-304-cp311-cp311-win_arm64.whl", hash = "sha256:cbbe34dad39bdbaa2889a424d28752f1b4971939b14b1bb48cbf0182a3bcfc43"}, - {file = "pywin32-304-cp36-cp36m-win32.whl", hash = "sha256:be253e7b14bc601718f014d2832e4c18a5b023cbe72db826da63df76b77507a1"}, - {file = "pywin32-304-cp36-cp36m-win_amd64.whl", hash = "sha256:de9827c23321dcf43d2f288f09f3b6d772fee11e809015bdae9e69fe13213988"}, - {file = "pywin32-304-cp37-cp37m-win32.whl", hash = "sha256:f64c0377cf01b61bd5e76c25e1480ca8ab3b73f0c4add50538d332afdf8f69c5"}, - {file = "pywin32-304-cp37-cp37m-win_amd64.whl", hash = "sha256:bb2ea2aa81e96eee6a6b79d87e1d1648d3f8b87f9a64499e0b92b30d141e76df"}, - {file = "pywin32-304-cp38-cp38-win32.whl", hash = "sha256:94037b5259701988954931333aafd39cf897e990852115656b014ce72e052e96"}, - {file = "pywin32-304-cp38-cp38-win_amd64.whl", hash = "sha256:ead865a2e179b30fb717831f73cf4373401fc62fbc3455a0889a7ddac848f83e"}, - {file = "pywin32-304-cp39-cp39-win32.whl", hash = "sha256:25746d841201fd9f96b648a248f731c1dec851c9a08b8e33da8b56148e4c65cc"}, - {file = "pywin32-304-cp39-cp39-win_amd64.whl", hash = "sha256:d24a3382f013b21aa24a5cfbfad5a2cd9926610c0affde3e8ab5b3d7dbcf4ac9"}, -] -pywinpty = [ - {file = "pywinpty-2.0.5-cp310-none-win_amd64.whl", hash = "sha256:f86c76e2881c37e69678cbbf178109f8da1fa8584db24d58e1b9369b0276cfcb"}, - {file = "pywinpty-2.0.5-cp37-none-win_amd64.whl", hash = "sha256:ff9b52f182650cfdf3db1b264a6fe0963eb9d996a7a1fa843ac406c1e32111f8"}, - {file = "pywinpty-2.0.5-cp38-none-win_amd64.whl", hash = "sha256:651ee1467bd7eb6f64d44dbc954b7ab7d15ab6d8adacc4e13299692c67c5d5d2"}, - {file = "pywinpty-2.0.5-cp39-none-win_amd64.whl", hash = 
"sha256:e59a508ae78374febada3e53b5bbc90b5ad07ae68cbfd72a2e965f9793ae04f3"}, - {file = "pywinpty-2.0.5.tar.gz", hash = "sha256:e125d3f1804d8804952b13e33604ad2ca8b9b2cac92b27b521c005d1604794f8"}, -] -pyyaml = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -pyzmq = [ - {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:176be6c348dbec04e8e0d41e810743b7084b73e50954a6fedeeafc65d7fa9290"}, - {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ef2d1476cea927ba33a29f59aa128ce3b174e81083cbd091dd3149af741c85d"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2394bb857607494c3750b5040f852a1ad7831d7a7907b6106f0af2c70860cef"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fe8807d67456e7cf0e9a33b85e0d05bb9d2977dbdb23977e4cc2b843633618fd"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be3425dfdb9c46dc62d490fc1a6142a5f3dc6605ebb9048ae675056ef621413c"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda55ff0a7566405fb29ca38db1829fecb4c041b8dc3f91754f337bb7b27cbd8"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e4d70d34112997a32c8193fae2579aec854745f8730031e5d84cc579fd98ff"}, - {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f3daabbe42ca31712e29d906dfa4bf1890341d2fd5178de118bc9977a8d2b23b"}, - {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e7ae3e520bd182a0cbfff3cc69dda3a2c26f69847e81bd3f090ed04471fc1282"}, - {file = "pyzmq-23.0.0-cp310-cp310-win32.whl", hash = "sha256:1d480d48253f61ff90115b8069ed32f51a0907eb19101c4a5ae0b9a5973e40ad"}, - {file = "pyzmq-23.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7eca5902ff41575d9a26f91fc750018b7eb129600ea600fe69ce852fbdfab4e2"}, - {file = "pyzmq-23.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:b2a4af5e6fa85ee1743c725b46579f8de0b97024eb5ae1a0b5c5711adc436665"}, - {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591b455546d34bb96aa453dd9666bddb8c81314e23dbf2606f9614acf7e73d9f"}, - {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdd008629293a0d4f00b516841ac0df89f17a64bc2d83bcfa48212d3f3b3ca1a"}, - {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:df0b05fa4321b090abe5601dea9b1c8933c06f496588ccb397a0b1f9dfe32ebe"}, - {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:12a53f5c13edf12547ce495afebdd5ab11c1b67ea078a941b21e13161783741a"}, - {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:cb45b7ea577283b547b907a3389d62ca2eaddaf725fbb79cd360802440fa9c91"}, - {file = "pyzmq-23.0.0-cp36-cp36m-win32.whl", hash = "sha256:0a787f7870cba38d655c68ea7ae14bb6c3e9e19bb618d0c2412513321eeaeb80"}, - {file = "pyzmq-23.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:536491ad640448f14d8aa2dc497c354a348f216eb23513bf5aa0ac40e2b02577"}, - {file = "pyzmq-23.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5eaf7e0841d3d8d1d92838c8b56f98cb9bf35b14bcbe4efa281e4812ef4be728"}, - {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21792f4d0fcc5040978ee211c033e915d8b6608ea8a5b33fe197a04f0d43e991"}, - {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a37f0ec88e220326803084208d80229218b309d728954ab747ab21cca33424aa"}, - {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9622d9560a6fd8d589816cdcec6946642cb4e070b3f68be1d3779b52cf240f73"}, - {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:434044eec7f9df08fc5ca5c9bdd1a4bb08663679d43ebe7b9849713956f4d85f"}, - {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12eac2294d48ee27d1eaef7e214acedb394e4c95e3a1f6e4467099b82d58ef73"}, - {file = "pyzmq-23.0.0-cp37-cp37m-win32.whl", hash = "sha256:07d2008e51718fba60641e5d1a0646b222b7929f16f6e7cf0834b8439f42c9e8"}, - {file = "pyzmq-23.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b8528aefceb787f41ad429f3210a3c6b52e99f85413416e3d0c9e6d035f8ac"}, - {file = "pyzmq-23.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3f3807e81bf51d4c63eb12a21920614e0e840645418e9f2e3b5ffdd5991b3415"}, - {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:011a45c846ec69a3671ed15893b74b6ad608800c89ac6d0f0411e2137c6b313d"}, - {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b97dc1273f16f85a38cff6668a07b636ef14e30591039efbfd21f5f91efae964"}, - {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8951830d6a00636b3af478091f9668ecc486f1dad01b975527957fd1d8c31bfd"}, - {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5619f6598d6fd30778053ae2daa48a7c54029816648b908270b751411fd52e74"}, - {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a89b9860d2171bcf674648dc8186db9cf3b773ad3c0610a2c7bf189cf3560b6"}, - {file = "pyzmq-23.0.0-cp38-cp38-win32.whl", hash = "sha256:0258563bf69f6ca305204354f171e0627a9bf8fe78c9d4f63a5e2447035cbb4b"}, - {file = "pyzmq-23.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:9feb7ccd426ff2158ce79f4c87a8a1600ed4f77e65e2fffda2b42638b2bc73e4"}, - {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_15_universal2.whl", 
hash = "sha256:e9631c6a339843e4f95efb80ff9a1bfaaf3d611ba9677a7a5cc61ffb923b4e06"}, - {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34b143751e9b2b89cf9b656081f1b2842a563c4c9ffc8465531875daf546e772"}, - {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f227150148e7c3db7ecd8a58500439979f556e15455841a30b6d121755b14bc"}, - {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277b3ebc684b369a57a186a9acf629c1b01247eb04d1105536ef2dae5f61168a"}, - {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e2093a97bf3f6008a4be6b5bae8ae3fc409f18373593bef19dd7b381ab8030c"}, - {file = "pyzmq-23.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6c09e6e5c4baf0959287943dc8170624d739ae555d334e896a94d9de01c7bb21"}, - {file = "pyzmq-23.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c234aefeef034c5d6de452e2af5173a95ea06315b685db703091e6f937a6e60"}, - {file = "pyzmq-23.0.0-cp39-cp39-win32.whl", hash = "sha256:7b518ad9cdbaaeb1a9da3444797698871ae2eeae34ff9a656d5150d37e1e42a1"}, - {file = "pyzmq-23.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:011f26841dd56ed87e464c98023dbbd4c0b3ab8802a045de3ea83e0187eb8145"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a89285fedbeca483a855a77285453e21e4fc86ef0944bc018ef4b3033aa04ad2"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5a13171268f05d127e31b4c369b753733f67dbb0d765901ef625a115feb5c7de"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd3f563b98e2a8730c93bdc550f119ae766b2d3da1f0d6a3c7735b59adfa1642"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e730d490b1421e52b43b1b9f5e1f8c3973499206e188f29b582577531e11033b"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0de8a7e13ffacfe33c89acc0d7bfa2f5bde94e3f74b7f1e4d43c97ce17864d77"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a64b9cce166396df5f33894074d6515778d48c63aae5ee1abd86d8bbc5a711d8"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e464e7b1be2216eba54b47256c15bf307ae4a656aa0f73becea7b3e7283c5ac2"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3fa7126d532effee452c0ab395ab3cbef1c06fd6870ab7e681f812ba9e685cfa"}, - {file = "pyzmq-23.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9273f6d1da1018822f41630fb0f3fe208e8e70e5d5e780795326900cfa22d8b6"}, - {file = "pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca7d77f24644298cbe53bc279eb7ca05f3b8637473d392f0c9f34b37f08b49a"}, - {file = "pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f40604437ec8010f77f7053fd135ccb202d6ca18329903831087cab8dbdab1"}, - {file = "pyzmq-23.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4d861ae20040afc17adef33053c328667da78d4d3676b2936788fd031665e3a8"}, - {file = "pyzmq-23.0.0.tar.gz", hash = "sha256:a45f5c0477d12df05ef2e2922b49b7c0ae9d0f4ff9b6bb0d666558df0ef37122"}, -] -qtconsole = [ - {file = "qtconsole-5.3.0-py3-none-any.whl", hash = "sha256:75f2ded876444454edcb5a53262149e33b53db3a4a53116b7c3df52830905b0f"}, - {file = "qtconsole-5.3.0.tar.gz", hash = "sha256:8e3520fdc75e46abc4cc6cffeca16fa2652754109b8ae839fa28e27d1eba5625"}, -] -qtpy = 
[ - {file = "QtPy-2.1.0-py3-none-any.whl", hash = "sha256:aee0586081f943029312becece9f63977b0a9e3788f77a6ac8cc74802bb173d6"}, - {file = "QtPy-2.1.0.tar.gz", hash = "sha256:ca8cd4217175186344299ee4c0f7e7adcf362c70852ba35b255a534077025c06"}, -] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -s3transfer = [ - {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, - {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, -] -sammy = [ - {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, - {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, -] -send2trash = [ - {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, - {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] -stack-data = [ - {file = "stack_data-0.2.0-py3-none-any.whl", hash = "sha256:999762f9c3132308789affa03e9271bbbe947bf78311851f4d485d8402ed858e"}, - {file = "stack_data-0.2.0.tar.gz", hash = "sha256:45692d41bd633a9503a5195552df22b583caf16f0b27c4e58c98d88c8b648e12"}, -] -stripe = [ - {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, - {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, -] -swaggyp = [ - {file = "swaggyp-0.2.0-py3-none-any.whl", hash = "sha256:030dca0f0a24469abcf7e1f047eaa01c84c206a8d891cfb1a4a24f40e2a6a146"}, - {file = "swaggyp-0.2.0.tar.gz", hash = "sha256:b39ab7f8a49fdb17af7862c5ac81b872a5f7f95c3b0989a6a285fde3f4ee4885"}, -] -terminado = [ - {file = "terminado-0.15.0-py3-none-any.whl", hash = "sha256:0d5f126fbfdb5887b25ae7d9d07b0d716b1cc0ccaacc71c1f3c14d228e065197"}, - {file = "terminado-0.15.0.tar.gz", hash = "sha256:ab4eeedccfcc1e6134bfee86106af90852c69d602884ea3a1e8ca6d4486e9bfe"}, -] -tinycss2 = [ - {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, - {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, -] -tornado = [ - {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = 
"sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, - {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, - {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, - {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, - {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, - {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, - {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, - {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, - {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, - {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = 
"sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, - {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, - {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, - {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, - {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, - {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, - {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, - {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, - {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, -] -traitlets = [ - {file = "traitlets-5.2.1.post0-py3-none-any.whl", hash = "sha256:f44b708d33d98b0addb40c29d148a761f44af740603a8fd0e2f8b5b27cf0f087"}, - {file = "traitlets-5.2.1.post0.tar.gz", hash = "sha256:70815ecb20ec619d1af28910ade523383be13754283aef90528eb3d47b77c5db"}, -] -urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, -] -valley = [ - {file = "valley-1.5.6-py3-none-any.whl", hash = "sha256:fa2e5fc51d59901e5eb178116a4fb15b712928b4c87809f59cdf02a934d63cf6"}, - {file = "valley-1.5.6.tar.gz", hash = "sha256:ec55f7df3512f0dfa23c9f253b414a02491dea41a62230ed459a43cf02fee9a3"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] -werkzeug = [ - {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, - {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, -] -widgetsnbextension = [ - {file = "widgetsnbextension-3.6.0-py2.py3-none-any.whl", hash = 
"sha256:4fd321cad39fdcf8a8e248a657202d42917ada8e8ed5dd3f60f073e0d54ceabd"}, - {file = "widgetsnbextension-3.6.0.tar.gz", hash = "sha256:e84a7a9fcb9baf3d57106e184a7389a8f8eb935bf741a5eb9d60aa18cc029a80"}, -] -zipp = [ - {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, - {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, -] +appnope = [] +argon2-cffi = [] +argon2-cffi-bindings = [] +asttokens = [] +astunparse = [] +attrs = [] +backcall = [] +beautifulsoup4 = [] +bleach = [] +boto3 = [] +botocore = [] +cachetools = [] +certifi = [] +cffi = [] +charset-normalizer = [] +click = [] +colorama = [] +coverage = [] +cryptography = [] +debugpy = [] +decorator = [] +defusedxml = [] +entrypoints = [] +envs = [] +executing = [] +fastjsonschema = [] +faunadb = [] +formy = [] +future = [] +graphql-py = [] +h2 = [] +hpack = [] +hyper = [] +hyperframe = [] +idna = [] +importlib-resources = [] +ipykernel = [] +ipython = [] +ipython-genutils = [] +ipywidgets = [] +iso8601 = [] +jedi = [] +jinja2 = [] +jmespath = [] +jsonschema = [] +jupyter = [] +jupyter-client = [] +jupyter-console = [] +jupyter-core = [] +jupyterlab-pygments = [] +jupyterlab-widgets = [] +markupsafe = [] +matplotlib-inline = [] +mistune = [] +nbclient = [] +nbconvert = [] +nbformat = [] +nest-asyncio = [] +notebook = [] +packaging = [] +pandocfilters = [] +parso = [] +pdoc = [] +pexpect = [] +pickleshare = [] +ply = [] +prometheus-client = [] +prompt-toolkit = [] +psutil = [] +ptyprocess = [] +pure-eval = [] +py = [] +pycparser = [] +pygments = [] +pyjwt = [] +pyparsing = [] +pyrsistent = [] +python-dateutil = [] +pytz = [] +pywin32 = [] +pywinpty = [] +pyyaml = [] +pyzmq = [] +qtconsole = [] +qtpy = [] +requests = [] +s3transfer = [] +sammy = [] +send2trash = [] +six = [] +soupsieve = [] +stack-data = [] +stripe = [] +swaggyp = [] +terminado = [] +tinycss2 = [] +tornado = [] +traitlets = [] +urllib3 = [] +valley = [] +wcwidth = [] +webencodings = [] +werkzeug = [] +widgetsnbextension = [] +zipp = [] diff --git a/pyproject.toml b/pyproject.toml index 25bd35e..7edd45a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,8 +8,7 @@ license = "Apache-2.0" [tool.poetry.dependencies] python = "^3.8" faunadb = "^4.0.1" -valley = "^1.5.6" -jinja2 = "3.0.1" +valley = "1.5.8" envs = "^1.3" requests = "^2.23.0" pytz = "^2021.1" @@ -26,6 +25,8 @@ sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" swaggyp = "^0.2.0" +formy = "1.3.1" +Jinja2 = "^3.1.2" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From d5252ea789f6e6461b901577c5bf80cb12cdbaca Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 14 Sep 2022 14:13:37 +0800 Subject: [PATCH 086/214] added building of volume before build in github workflow --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f4af893..4735795 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -36,6 +36,7 @@ jobs: # Ignore the failure of a step and avoid terminating the job. continue-on-error: true + - run: docker volume create --name=pfunk-fauna-data - run: docker-compose build # Runs a single command using the runners shell From 457cbaa5f5b7a8316e858f3463d3813c56644d4e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 15 Sep 2022 13:23:07 +0800 Subject: [PATCH 087/214] removed packageloader for ecommerce. 
Changed port of faunadb in docker-compose --- docker-compose.yaml | 2 +- pfunk/utils/templates.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 5b88d63..94e7299 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -27,7 +27,7 @@ services: - 8443:8443 - 8444:8444 - 8445:8445 - - 8084:8084 + - 8085:8085 volumes: - ./logs:/var/log/faunadb - ./docker-fauna.yml:/docker-fauna.yml diff --git a/pfunk/utils/templates.py b/pfunk/utils/templates.py index 27aa90c..18c3d7c 100644 --- a/pfunk/utils/templates.py +++ b/pfunk/utils/templates.py @@ -11,7 +11,6 @@ def get_loaders(): loaders = [ FileSystemLoader(env('TEMPLATE_ROOT_DIR')), PackageLoader('pfunk.contrib.auth'), - PackageLoader('pfunk.contrib.ecommerce'), ] for i in env('TEMPLATE_PACKAGES', [], var_type='list'): loaders.append(PackageLoader(i)) From 29919c6642aa688ee99df06272b11614f28c1053 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 16 Sep 2022 14:13:47 +0800 Subject: [PATCH 088/214] added unittests for circular dependency error for easier testing. fixed gh actions to use env var with gh secrets --- .github/workflows/main.yml | 5 +++- pfunk/tests/test_dev.py | 52 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 pfunk/tests/test_dev.py diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4735795..eb29c63 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -26,7 +26,10 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v2 - - run: touch .env + - name: Generate env vars from gh secrets + run: | + echo TEMPLATE_ROOT_DIR=${{ secrets.TEMPLATE_ROOT_DIR }} >> .env + cat .env - run: docker-compose pull # In this step, this action saves a list of existing images, diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py new file mode 100644 index 0000000..5acd567 --- /dev/null +++ b/pfunk/tests/test_dev.py @@ -0,0 +1,52 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util + +from pfunk.contrib.auth.collections import BaseUser, User +from pfunk.testcase import APITestCase +from pfunk.contrib.auth.collections import Group +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField + + +# Simple setup +# Env var setup for user and group +# os.environ['GROUP_COLLECTION'] = 'pfunk.tests.test_dev.NewGroup' +# os.environ['USER_COLLECTION'] = 'pfunk.tests.test_dev.NewUser' + +class NewUser(User): + # groups = ManyToManyField('pfunk.tests.test_dev.NewGroup') + pass + +class NewGroup(Group): + users = ManyToManyField('pfunk.tests.test_dev.NewUser') + +class Blog(Collection): + """ Collection for DigitalOcean-Type request """ + title = StringField(required=True) + content = StringField(required=True) + user = ReferenceField(NewUser) + + def __unicode__(self): + return self.title + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [NewUser, NewGroup, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = NewGroup.create(name='Power Users', slug='power-users') + self.user = NewUser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', 
_credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', user=self.user) + + self.token, self.exp = NewUser.api_login("test", "abc123") + print(f'\n\nTOKEN: {self.token}') + print(f'\n\nEXP: {self.exp}') + + def test_mock(self): + assert True \ No newline at end of file From e3de548ad1319ca9e8781640523aecaa115cbda0 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 19 Sep 2022 14:27:43 +0800 Subject: [PATCH 089/214] reverted docker-compose to use 8084 port for fauna --- docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 94e7299..5b88d63 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -27,7 +27,7 @@ services: - 8443:8443 - 8444:8444 - 8445:8445 - - 8085:8085 + - 8084:8084 volumes: - ./logs:/var/log/faunadb - ./docker-fauna.yml:/docker-fauna.yml From 04fb584dc7e59bf4d8b61143552b7074fc473c7c Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 19 Sep 2022 14:52:33 +0800 Subject: [PATCH 090/214] Added env vars creation from gh secrets. Changed faunadb port --- .github/workflows/main.yml | 9 +++++++++ docker-compose.yaml | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index eb29c63..94cd394 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,6 +29,15 @@ jobs: - name: Generate env vars from gh secrets run: | echo TEMPLATE_ROOT_DIR=${{ secrets.TEMPLATE_ROOT_DIR }} >> .env + FAUNA_SECRET=${{ secrets.FAUNA_SECRET }} >> .env + FAUNA_SCHEME=${{ secrets.FAUNA_SCHEME }} >> .env + FAUNA_DOMAIN=${{ secrets.FAUNA_DOMAIN }} >> .env + FAUNA_PORT=${{ secrets.FAUNA_PORT}} >> .env + FAUNA_GRAPHQL_IMPORT_URL=${{ secrets.FAUNA_GRAPHQL_IMPORT_URL}} >> .env + FAUNA_GRAPHQL_URL=$${{ secrets.FAUNA_GRAPHQL_URL }} >> .env + DEFAULT_FROM_EMAIL=${{ secrets.DEFAULT_FROM_EMAIL }} >> .env + PROJECT_NAME=${{ secrets.PROJECT_NAME }} >> .env + KEY_MODULE=${{ secrets.KEY_MODULE }} >> .env cat .env - run: docker-compose pull diff --git a/docker-compose.yaml b/docker-compose.yaml index 5b88d63..94e7299 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -27,7 +27,7 @@ services: - 8443:8443 - 8444:8444 - 8445:8445 - - 8084:8084 + - 8085:8085 volumes: - ./logs:/var/log/faunadb - ./docker-fauna.yml:/docker-fauna.yml From a32854cf15db8d8a49244098b5a916e1a081f004 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 20 Sep 2022 15:38:05 +0800 Subject: [PATCH 091/214] testing remapped ports for faunadb --- docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 94e7299..bc63e87 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -27,7 +27,7 @@ services: - 8443:8443 - 8444:8444 - 8445:8445 - - 8085:8085 + - 8085:8084 volumes: - ./logs:/var/log/faunadb - ./docker-fauna.yml:/docker-fauna.yml From 0d4dada5ee5b799213817c5ddc2574c15fd19c7a Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 20 Sep 2022 15:44:04 +0800 Subject: [PATCH 092/214] reverted fauna port back to 8085 --- docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index bc63e87..94e7299 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -27,7 +27,7 @@ services: - 8443:8443 - 8444:8444 - 8445:8445 - - 8085:8084 + - 8085:8085 volumes: - 
./logs:/var/log/faunadb - ./docker-fauna.yml:/docker-fauna.yml From 3f48aad97eab2e40250acf8ad28e9898e2724ad0 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 22 Sep 2022 15:55:07 +0800 Subject: [PATCH 093/214] reworked tests for htmlviews --- .github/workflows/main.yml | 18 +++++++++--------- pfunk/tests/__init__.py | 16 ++++++++++++++++ pfunk/tests/templates/house/create.html | 3 +++ pfunk/tests/templates/house/delete.html | 3 +++ pfunk/tests/templates/house/detail.html | 3 +++ pfunk/tests/templates/house/list.html | 3 +++ pfunk/tests/templates/house/update.html | 3 +++ pfunk/tests/test_web_crud.py | 15 ++++----------- 8 files changed, 44 insertions(+), 20 deletions(-) create mode 100644 pfunk/tests/templates/house/create.html create mode 100644 pfunk/tests/templates/house/delete.html create mode 100644 pfunk/tests/templates/house/detail.html create mode 100644 pfunk/tests/templates/house/list.html create mode 100644 pfunk/tests/templates/house/update.html diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 94cd394..2a2f56e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,15 +29,15 @@ jobs: - name: Generate env vars from gh secrets run: | echo TEMPLATE_ROOT_DIR=${{ secrets.TEMPLATE_ROOT_DIR }} >> .env - FAUNA_SECRET=${{ secrets.FAUNA_SECRET }} >> .env - FAUNA_SCHEME=${{ secrets.FAUNA_SCHEME }} >> .env - FAUNA_DOMAIN=${{ secrets.FAUNA_DOMAIN }} >> .env - FAUNA_PORT=${{ secrets.FAUNA_PORT}} >> .env - FAUNA_GRAPHQL_IMPORT_URL=${{ secrets.FAUNA_GRAPHQL_IMPORT_URL}} >> .env - FAUNA_GRAPHQL_URL=$${{ secrets.FAUNA_GRAPHQL_URL }} >> .env - DEFAULT_FROM_EMAIL=${{ secrets.DEFAULT_FROM_EMAIL }} >> .env - PROJECT_NAME=${{ secrets.PROJECT_NAME }} >> .env - KEY_MODULE=${{ secrets.KEY_MODULE }} >> .env + echo FAUNA_SECRET=${{ secrets.FAUNA_SECRET }} >> .env + echo FAUNA_SCHEME=${{ secrets.FAUNA_SCHEME }} >> .env + echo FAUNA_DOMAIN=${{ secrets.FAUNA_DOMAIN }} >> .env + echo FAUNA_PORT=${{ secrets.FAUNA_PORT}} >> .env + echo FAUNA_GRAPHQL_IMPORT_URL=${{ secrets.FAUNA_GRAPHQL_IMPORT_URL}} >> .env + echo FAUNA_GRAPHQL_URL=$${{ secrets.FAUNA_GRAPHQL_URL }} >> .env + echo DEFAULT_FROM_EMAIL=${{ secrets.DEFAULT_FROM_EMAIL }} >> .env + echo PROJECT_NAME=${{ secrets.PROJECT_NAME }} >> .env + echo KEY_MODULE=${{ secrets.KEY_MODULE }} >> .env cat .env - run: docker-compose pull diff --git a/pfunk/tests/__init__.py b/pfunk/tests/__init__.py index 6002a22..ce57492 100644 --- a/pfunk/tests/__init__.py +++ b/pfunk/tests/__init__.py @@ -1,6 +1,22 @@ from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole from pfunk.resources import Index +from pfunk.web.views.html import HTMLView + +from jinja2 import Environment +from jinja2.loaders import ChoiceLoader, PackageLoader, FileSystemLoader + + +temp_env = Environment(loader=FileSystemLoader('./pfunk/tests/templates')) +# Let's monkey-patch `HTMLView`'s method `get_template` for testing +def get_template(self): + return temp_env.get_template( + self.template_name.format( + collection=self.collection.get_collection_name().lower(), + action=self.action + ) + ) +HTMLView.get_template = get_template GENDER_PRONOUN = Enum(name='gender_pronouns', choices=['he', 'her', 'they']) diff --git a/pfunk/tests/templates/house/create.html b/pfunk/tests/templates/house/create.html new file mode 100644 index 0000000..3c4af60 --- /dev/null +++ b/pfunk/tests/templates/house/create.html @@ -0,0 +1,3 @@ +{% 
block test_block %} +{{unittest_value}} +{% endblock %} \ No newline at end of file diff --git a/pfunk/tests/templates/house/delete.html b/pfunk/tests/templates/house/delete.html new file mode 100644 index 0000000..3c4af60 --- /dev/null +++ b/pfunk/tests/templates/house/delete.html @@ -0,0 +1,3 @@ +{% block test_block %} +{{unittest_value}} +{% endblock %} \ No newline at end of file diff --git a/pfunk/tests/templates/house/detail.html b/pfunk/tests/templates/house/detail.html new file mode 100644 index 0000000..3c4af60 --- /dev/null +++ b/pfunk/tests/templates/house/detail.html @@ -0,0 +1,3 @@ +{% block test_block %} +{{unittest_value}} +{% endblock %} \ No newline at end of file diff --git a/pfunk/tests/templates/house/list.html b/pfunk/tests/templates/house/list.html new file mode 100644 index 0000000..3c4af60 --- /dev/null +++ b/pfunk/tests/templates/house/list.html @@ -0,0 +1,3 @@ +{% block test_block %} +{{unittest_value}} +{% endblock %} \ No newline at end of file diff --git a/pfunk/tests/templates/house/update.html b/pfunk/tests/templates/house/update.html new file mode 100644 index 0000000..3c4af60 --- /dev/null +++ b/pfunk/tests/templates/house/update.html @@ -0,0 +1,3 @@ +{% block test_block %} +{{unittest_value}} +{% endblock %} \ No newline at end of file diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py index 8a1f963..065f56e 100644 --- a/pfunk/tests/test_web_crud.py +++ b/pfunk/tests/test_web_crud.py @@ -24,16 +24,13 @@ def test_read(self): res = self.c.get(f'/house/detail/{self.house.ref.id()}/', headers={ "Authorization": self.token}) - self.assertTrue(res.json['success']) - self.assertEqual("test address", res.json['data']['data']['address']) + self.assertTrue(res.status_code, 200) def test_read_all(self): res = self.c.get(f'/house/list/', headers={ "Authorization": self.token}) self.assertTrue(res.status_code, 200) - self.assertIn("test address", str(res.get_data())) - def test_create(self): self.assertNotIn("the street somewhere", [ @@ -45,9 +42,7 @@ def test_create(self): headers={ "Authorization": self.token}) - self.assertTrue(res.json['success']) - self.assertIn("the street somewhere", [ - house.address for house in House.all()]) + self.assertTrue(res.status_code, 200) def test_update(self): self.assertNotIn("the updated street somewhere", [ @@ -59,9 +54,7 @@ def test_update(self): headers={ "Authorization": self.token}) - self.assertTrue(res.json['success']) - self.assertIn("the updated street somewhere", [ - house.address for house in House.all()]) + self.assertTrue(res.status_code, 200) def test_delete(self): res = self.c.delete(f'/house/delete/{self.house.ref.id()}/', @@ -70,4 +63,4 @@ def test_delete(self): "Content-Type": "application/json" }) - self.assertTrue(res.json['success']) + self.assertTrue(res.status_code, 200) From 05e3d78f40534ef8ee7641f38a366bfc25d663c9 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 30 Sep 2022 16:17:55 +0800 Subject: [PATCH 094/214] first stab at new contrib for separate out-of-box user and group and extendedUser and BaseGroup --- pfunk/contrib/auth/collections.py | 123 ++++++++++++++++++------------ pfunk/fields.py | 1 - pfunk/project.py | 4 + pfunk/tests/test_dev.py | 54 ++++++++----- pfunk/tests/unittest_keys.py | 2 + 5 files changed, 114 insertions(+), 70 deletions(-) create mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 39cbacb..bf021ad 100644 --- a/pfunk/contrib/auth/collections.py +++ 
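A note on the reworked status-code checks in test_web_crud.py above: unittest's assertTrue(expr, msg) treats its second argument as a failure message rather than an expected value, so assertTrue(res.status_code, 200) passes for any truthy status code. A minimal sketch of the check these tests appear to intend (reusing the house list endpoint from the surrounding tests):

# Sketch only: assert an exact status code instead of truthiness.
res = self.c.get('/house/list/', headers={"Authorization": self.token})
self.assertEqual(res.status_code, 200)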
b/pfunk/contrib/auth/collections.py @@ -19,43 +19,10 @@ from pfunk.fields import SlugField -class UserGroups(Collection): - """ Many-to-many collection of the user-group relationship - - The native fauna-way of holding many-to-many relationship - is to only have the ID of the 2 object. Here in pfunk, we - leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities - of a user, allowing us to add easier permission handling. - Instead of manually going to roles and adding individual - collections which can be painful in long term. - - Attributes: - collection_name (str): - Name of the collection in Fauna - userID (str): - Fauna ref of user that is tied to the group - groupID (str): - Fauna ref of a collection that is tied with the user - permissions (str[]): - List of permissions, `['create', 'read', 'delete', 'write']` - """ - collection_name = 'users_groups' - userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) - groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" - - -class Group(Collection): +class BaseGroup(Collection): """ Group collection that the user belongs to """ name = StringField(required=True) slug = SlugField(unique=True, required=False) - users = ManyToManyField( - env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User'), - relation_name='users_groups') def __unicode__(self): return self.name # pragma: no cover @@ -85,7 +52,6 @@ class BaseUser(Collection): collection_roles = [Public, UserRole] non_public_fields = ['groups'] use_email_verification = True - group_class = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) # Views collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, ForgotPasswordChangeView] @@ -99,7 +65,8 @@ class BaseUser(Collection): email = EmailField(required=True, unique=True) verification_key = StringField(required=False, unique=True) forgot_password_key = StringField(required=False, unique=True) - account_status = EnumField(AccountStatus, required=True, default_value="INACTIVE") + account_status = EnumField( + AccountStatus, required=True, default_value="INACTIVE") def __unicode__(self): return self.username # pragma: no cover @@ -113,6 +80,7 @@ def login(cls, username, password, _token=None): """ c = cls() try: + print(f'USERNAME: {username}\nPASSWORD: {password}') return c.client(_token=_token).query( q.call("login_user", { "username": username, "password": password}) @@ -175,12 +143,14 @@ def verify_email(cls, verification_key, verify_type='signup', password=None): attached to the user """ if verify_type == 'signup': - user = cls.get_by('unique_User_verification_key', [verification_key]) + user = cls.get_by('unique_User_verification_key', + [verification_key]) user.verification_key = '' user.account_status = 'ACTIVE' user.save() elif verify_type == 'forgot' and password: - user = cls.get_by('unique_User_forgot_password_key', [verification_key]) + user = cls.get_by('unique_User_forgot_password_key', [ + verification_key]) user.forgot_password_key = '' user.save(_credentials=password) @@ -268,14 +238,17 @@ def update_password(cls, current_password, new_password, new_password_confirm, _ `Wrong current password.` """ if new_password != new_password_confirm: - raise ValidationException('new_password: Password field and 
password confirm field do not match.') + raise ValidationException( + 'new_password: Password field and password confirm field do not match.') c = cls() try: return c.client(_token=_token).query( - q.call("update_password", {'current_password': current_password, 'new_password': new_password}) + q.call("update_password", { + 'current_password': current_password, 'new_password': new_password}) ) except BadRequest: - raise ValidationException('current_password: Password update failed.') + raise ValidationException( + 'current_password: Password update failed.') @classmethod def get_current_user(cls, _token=None): @@ -294,11 +267,12 @@ def __unicode__(self): return self.username # pragma: no cover -class User(BaseUser): - user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') - group_class = import_util('pfunk.contrib.auth.collections.Group') - """ User that has permission capabilities. Extension of `BaseUser` """ - groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'), 'users_groups') +class ExtendedUser(BaseUser): + """ User that has permission capabilities. Extension of `BaseUser`. + Subclass and define these properties + Provides base methods for group-user permissions. If there are no + supplied `groups` property, will raise `NotImplementedErrror` + """ @classmethod def get_permissions(cls, ref, _token=None): @@ -306,6 +280,8 @@ def get_permissions(cls, ref, _token=None): def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ + if not self.group_class: + raise NotImplementedError return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( q.paginate(q.match('users_groups_by_user', self.ref)) ).get('data')] @@ -361,11 +337,62 @@ def add_permissions(self, group, permissions: list, _token=None): for i in permissions: perm_list.extend(i.permissions) + if not self.user_group_class: + raise NotImplementedError + try: - user_group = self.user_group_class.get_by('users_groups_by_group_and_user', terms=[group.ref, self.ref]) + user_group = self.user_group_class.get_by( + 'users_groups_by_group_and_user', terms=[group.ref, self.ref]) except DocNotFound: - user_group = self.user_group_class.create(userID=self.ref, groupID=group.ref, permissions=perm_list) + user_group = self.user_group_class.create( + userID=self.ref, groupID=group.ref, permissions=perm_list) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() return user_group + + +class UserGroups(Collection): + """ Many-to-many collection of the user-group relationship + + The native fauna-way of holding many-to-many relationship + is to only have the ID of the 2 object. Here in pfunk, we + leverage the flexibility of the collection to have another + field, which is `permissions`, this field holds the capablities + of a user, allowing us to add easier permission handling. + Instead of manually going to roles and adding individual + collections which can be painful in long term. 
+ + Attributes: + collection_name (str): + Name of the collection in Fauna + userID (str): + Fauna ref of user that is tied to the group + groupID (str): + Fauna ref of a collection that is tied with the user + permissions (str[]): + List of permissions, `['create', 'read', 'delete', 'write']` + """ + collection_name = 'users_groups' + userID = ReferenceField( + env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) + groupID = ReferenceField( + env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + +class Group(BaseGroup): + """ A default group that already has predefined M2M relationship with `pfunk.contrib.auth.collections.User` """ + users = ManyToManyField( + 'pfunk.contrib.auth.collections.User', 'users_groups') + + +class User(ExtendedUser): + """ A default user that already has predefined M2M relationship with `pfunk.contrib.auth.collections.Group` """ + user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') + group_class = import_util('pfunk.contrib.auth.collections.Group') + groups = ManyToManyField( + 'pfunk.contrib.auth.collections.Group', 'users_groups') diff --git a/pfunk/fields.py b/pfunk/fields.py index ddb1c1a..fa5755a 100644 --- a/pfunk/fields.py +++ b/pfunk/fields.py @@ -194,7 +194,6 @@ class ManyToManyField(GraphQLMixin, ForeignListProperty): relation_field = True def __init__(self, foreign_class, relation_name, return_type=None, return_prop=None, **kwargs): - self.foreign_class = foreign_class self.relation_name = relation_name super(ManyToManyField, self).__init__(foreign_class, return_type=return_type, return_prop=return_prop, **kwargs) diff --git a/pfunk/project.py b/pfunk/project.py index de66fb0..34cc745 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -204,6 +204,10 @@ def publish(self, mode: str = 'merge') -> int: auth=BearerAuth(secret), data=gql_io ) + + print(f'\n') + print(self.render()) + print('----------------------------------------\n') if resp.status_code == 200: test_mode = env('PFUNK_TEST_MODE', False, var_type='boolean') if not test_mode: diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py index 5acd567..0ea6dd2 100644 --- a/pfunk/tests/test_dev.py +++ b/pfunk/tests/test_dev.py @@ -3,50 +3,62 @@ import os from valley.utils import import_util -from pfunk.contrib.auth.collections import BaseUser, User +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, Group, User from pfunk.testcase import APITestCase -from pfunk.contrib.auth.collections import Group from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -# Simple setup -# Env var setup for user and group -# os.environ['GROUP_COLLECTION'] = 'pfunk.tests.test_dev.NewGroup' -# os.environ['USER_COLLECTION'] = 'pfunk.tests.test_dev.NewUser' +class UserGroups(Collection): + collection_name = 'custom_users_groups' + userID = ReferenceField('pfunk.tests.test_dev.Newuser') + groupID = ReferenceField('pfunk.tests.test_dev.Newgroup') + permissions = ListField() -class NewUser(User): - # groups = ManyToManyField('pfunk.tests.test_dev.NewGroup') - pass + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + +class Newgroup(BaseGroup): 
+ users = ManyToManyField('pfunk.tests.test_dev.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + group_class = import_util('pfunk.tests.test_dev.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_dev.Newgroup', relation_name='custom_users_groups') + # blogs = ManyToManyField('pfunk.tests.test_dev.Blog', relation_name='users_blogs') -class NewGroup(Group): - users = ManyToManyField('pfunk.tests.test_dev.NewUser') class Blog(Collection): """ Collection for DigitalOcean-Type request """ title = StringField(required=True) content = StringField(required=True) - user = ReferenceField(NewUser) + # users = ManyToManyField('pfunk.tests.test_dev.Newuser', relation_name='users_blogs') def __unicode__(self): return self.title -# Test case to see if user-group is working + +# Test case to see if user-group is working class TestUserGroupError(APITestCase): - collections = [NewUser, NewGroup, Blog] + collections = [Newuser, Newgroup, Blog] def setUp(self) -> None: super().setUp() - self.group = NewGroup.create(name='Power Users', slug='power-users') - self.user = NewUser.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) self.blog = Blog.create( title='test_blog', content='test content', user=self.user) - self.token, self.exp = NewUser.api_login("test", "abc123") + # BUG: logging in returns wrong credentials error + print(f'TEST USER: {self.user.__dict__}') + self.token, self.exp = Newuser.api_login("test", "abc123") print(f'\n\nTOKEN: {self.token}') print(f'\n\nEXP: {self.exp}') def test_mock(self): - assert True \ No newline at end of file + assert True diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py new file mode 100644 index 0000000..d13268d --- /dev/null +++ b/pfunk/tests/unittest_keys.py @@ -0,0 +1,2 @@ + +KEYS = {'06bcf013-403a-481b-b6ea-d00a83b7e464': {'signature_key': 'hYJTHn5rF3GeXARPyJyPL1vJhCF7yr-k1H9mITCH5hA=', 'payload_key': '-GF_6GYvo17Udk7AqtGIityqBXYizkENlxc_PDRODJQ=', 'kid': '06bcf013-403a-481b-b6ea-d00a83b7e464'}, '223e62b9-5686-40cb-9e5f-29eb2709d182': {'signature_key': '0pSOP2OxKberj2-kQdPSZDgDryTIWzAnIo0zU16LBQE=', 'payload_key': 'yk3GPQQomCeZIqk1-DWuXORCQpAA8cJdDI4faq3snTo=', 'kid': '223e62b9-5686-40cb-9e5f-29eb2709d182'}, 'c4545030-eac0-4286-8499-f4d5229d4520': {'signature_key': '1vEP5nxo1bUqkEJYjYOedcDtqDO-BWCTvybia5sbszY=', 'payload_key': 'feMygEXTzYddREZaLsiwKYQQnU8dhY0pxz_xXmQ51aU=', 'kid': 'c4545030-eac0-4286-8499-f4d5229d4520'}, '74d87722-3e42-46fe-ac19-9c24a6a49659': {'signature_key': '1wJp_n09f2lhFDOZY9pwqjHyyKtAZq185hdvbhkr7bg=', 'payload_key': 'nTTCKUoC3wBKhNsAxba65UYvJ2Wow2Lhx1bs95xisIk=', 'kid': '74d87722-3e42-46fe-ac19-9c24a6a49659'}, '34875674-7ace-41f9-b04a-fd0b27f8774f': {'signature_key': 'LHBp-r_TTJXSEMeyl2g2bklk4dg0hArkN_QE2nirKts=', 'payload_key': 'I8kBnhtBZ7SWi2C2EtcZHJ48_QT2J4tWMvGlFNb27w0=', 'kid': '34875674-7ace-41f9-b04a-fd0b27f8774f'}, 'de75b5a2-f950-49bc-91e5-03fba00390ae': {'signature_key': 'BDLMhgjmLHkBZwXYuay9x3eB6_4leetdHHMfR7wHi34=', 'payload_key': 'wNUJQyAnueoHv3zYpkqCOE4eECJvW9O4gUWm5JOLakI=', 'kid': 'de75b5a2-f950-49bc-91e5-03fba00390ae'}, 'a8254567-995a-43a7-a79a-e8855c50af51': {'signature_key': 
'ArxuXmhYJ41YAM8yQR1uSFyBk5Y5vABBeA103X0PFYI=', 'payload_key': 'ocrhhe1GkdzYQrEPq4ibKd6qHTiEzXXXhpQZZeOKw2k=', 'kid': 'a8254567-995a-43a7-a79a-e8855c50af51'}, 'd6190824-36b6-421b-9c48-ea9a2fd1a48a': {'signature_key': 'GS0gGJnia3bI01w0o4JbD3YMOvLXzzPqOyF4wD1lFwk=', 'payload_key': 'ZXquXtViWDhSGID7Ltufv40x3op-7T8dymOnnN3NAuE=', 'kid': 'd6190824-36b6-421b-9c48-ea9a2fd1a48a'}, '81239a5d-2ead-44ca-803a-a89fc6113b22': {'signature_key': 'MV8caU2wEw9SpudpKVXEVwiyei_2dpr3D4Va40ObsOk=', 'payload_key': 'G5dY4O038k9oKtCq1YPsYp37PZ1RBKLKKX31JXJKHL8=', 'kid': '81239a5d-2ead-44ca-803a-a89fc6113b22'}, 'b136e397-f3e9-4760-af51-270be165fe3a': {'signature_key': 'dKON8KVgtb1p4n2fe7TnXQ3-hlt85JO1uoZDZeVHt0w=', 'payload_key': 'WphxdwSWT-Oxso1MoC9zRnkUyc2zyf5kL6yVooco7ic=', 'kid': 'b136e397-f3e9-4760-af51-270be165fe3a'}} \ No newline at end of file From 973d0afd37f1e93c7d75b399f75d996c632944ab Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 7 Oct 2022 16:06:31 +0800 Subject: [PATCH 095/214] Refactored functions to use index that correctly corresponds to the collection's name --- pfunk/contrib/auth/collections.py | 6 ++++-- pfunk/contrib/auth/resources.py | 2 +- pfunk/tests/test_dev.py | 19 +++++++++---------- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index bf021ad..d7dbf4e 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -1,3 +1,4 @@ +from cmath import log import uuid from envs import env @@ -80,12 +81,11 @@ def login(cls, username, password, _token=None): """ c = cls() try: - print(f'USERNAME: {username}\nPASSWORD: {password}') return c.client(_token=_token).query( q.call("login_user", { "username": username, "password": password}) ) - except BadRequest: + except Exception as err: raise LoginFailed( 'The login credentials you entered are incorrect.') @@ -103,7 +103,9 @@ def permissions(self, _token=None): @classmethod def api_login(cls, username, password, _token=None): token = cls.login(username=username, password=password, _token=_token) + print(f'\n\nLOGIN: {token}\n\n') user = cls.get_current_user(_token=token) + print(f'\n\nUSER: {user}\n\n') claims = user.to_dict().copy() try: claims.get('data').pop('verification_key') diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index ab0c65a..1a7944e 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -13,7 +13,7 @@ def get_body(self): return q.query( q.lambda_(["input"], q.let({ - "user": q.match(q.index("unique_User_username"), q.select("username", q.var("input"))) + "user": q.match(q.index(f"unique_{self.collection.__class__.__name__}_username"), q.select("username", q.var("input"))) }, q.if_( q.equals( diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py index 0ea6dd2..b0d3c0f 100644 --- a/pfunk/tests/test_dev.py +++ b/pfunk/tests/test_dev.py @@ -3,7 +3,7 @@ import os from valley.utils import import_util -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, Group, User +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser from pfunk.testcase import APITestCase from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField @@ -15,9 +15,6 @@ class UserGroups(Collection): groupID = ReferenceField('pfunk.tests.test_dev.Newgroup') permissions = 
ListField() - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" - class Newgroup(BaseGroup): users = ManyToManyField('pfunk.tests.test_dev.Newuser', @@ -25,17 +22,19 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_dev.UserGroups') group_class = import_util('pfunk.tests.test_dev.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_dev.Newgroup', relation_name='custom_users_groups') - # blogs = ManyToManyField('pfunk.tests.test_dev.Blog', relation_name='users_blogs') + blogs = ManyToManyField('pfunk.tests.test_dev.Blog', + relation_name='users_blogs') class Blog(Collection): - """ Collection for DigitalOcean-Type request """ title = StringField(required=True) content = StringField(required=True) - # users = ManyToManyField('pfunk.tests.test_dev.Newuser', relation_name='users_blogs') + users = ManyToManyField('pfunk.tests.test_dev.Newuser', + relation_name='users_blogs') def __unicode__(self): return self.title @@ -43,7 +42,7 @@ def __unicode__(self): # Test case to see if user-group is working class TestUserGroupError(APITestCase): - collections = [Newuser, Newgroup, Blog] + collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: super().setUp() @@ -52,9 +51,9 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) self.blog = Blog.create( - title='test_blog', content='test content', user=self.user) + title='test_blog', content='test content', user=self.user, token=self.secret) - # BUG: logging in returns wrong credentials error + # BUG: logging in returns missing identity print(f'TEST USER: {self.user.__dict__}') self.token, self.exp = Newuser.api_login("test", "abc123") print(f'\n\nTOKEN: {self.token}') From 4131365b50db724c61112ccfaab95d2be84d22be Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 11 Oct 2022 14:57:59 +0800 Subject: [PATCH 096/214] added dynamic indexing in auth contrib collections --- pfunk/collection.py | 1 - pfunk/contrib/auth/collections.py | 19 +++++++++++++++---- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 8cb1b64..03ee860 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -213,7 +213,6 @@ def client(self, _token=None) -> FaunaClient: Returns: FaunaClient """ - if _token: return FaunaClient(secret=_token) return FaunaClient(secret=env('FAUNA_SECRET')) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index d7dbf4e..254df59 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -103,9 +103,7 @@ def permissions(self, _token=None): @classmethod def api_login(cls, username, password, _token=None): token = cls.login(username=username, password=password, _token=_token) - print(f'\n\nLOGIN: {token}\n\n') user = cls.get_current_user(_token=token) - print(f'\n\nUSER: {user}\n\n') claims = user.to_dict().copy() try: claims.get('data').pop('verification_key') @@ -284,8 +282,15 @@ def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ if not self.group_class: raise NotImplementedError + user_class = self.__class__.__name__.lower() + group_class = self.group_class.__name__.lower() + relation_name = self._base_properties.get("groups").relation_name + index_name = f'{user_class}s_{group_class}s_by_{user_class}' + if relation_name: + index_name = f'{relation_name}_by_{user_class}' + return 
[self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( - q.paginate(q.match('users_groups_by_user', self.ref)) + q.paginate(q.match(index_name, self.ref)) ).get('data')] def permissions(self, _token=None): @@ -302,9 +307,15 @@ def permissions(self, _token=None): perm_list (str[]): Permissions of the user in list: `['create', 'read', 'delete', 'write']` """ + user_class = self.__class__.__name__.lower() + group_class = self.group_class.__name__.lower() + relation_name = self._base_properties.get("groups").relation_name + index_name = f'{user_class}s_{group_class}s_by_{group_class}_and_{user_class}' + if relation_name: + index_name = f'{relation_name}_by_{group_class}_and_{user_class}' perm_list = [] for i in self.get_groups(_token=_token): - ug = self.user_group_class.get_index('users_groups_by_group_and_user', [ + ug = self.user_group_class.get_index(index_name, [ i.ref, self.ref], _token=_token) for user_group in ug: p = [] From 496e17d18af5396815c04c81e3cb77900eeca9b9 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 24 Oct 2022 17:57:01 +0800 Subject: [PATCH 097/214] made permissions in genericUserBasedRole to dynamically use the field that the user and group is using to work properly on custom user and group --- pfunk/collection.py | 29 ++++++++ pfunk/contrib/auth/resources.py | 89 ++++++++++++++++++----- pfunk/resources.py | 6 +- pfunk/tests/test_dev.py | 63 ----------------- pfunk/tests/test_user_subclass.py | 113 ++++++++++++++++++++++++++++++ pfunk/tests/unittest_keys.py | 2 - pfunk/utils/publishing.py | 4 +- 7 files changed, 221 insertions(+), 85 deletions(-) delete mode 100644 pfunk/tests/test_dev.py create mode 100644 pfunk/tests/test_user_subclass.py delete mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/collection.py b/pfunk/collection.py index 03ee860..233caf3 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -124,6 +124,35 @@ def get_fields(self) -> dict: """ return {k: q.select(k, q.var("input")) for k, v in self._base_properties.items() if k not in self.non_public_fields} + + def get_user_field(self) -> str: + """ Acquires the field where the relationship with a user was defined. + + It is required to define the `USER_COLLECTION` in env var if a custom + user will be used. This is to ensure the permissions to work properly + """ + fields = self._base_properties.items() + user_class = env('USER_COLLECTION', 'User') + user_field = None + for k, v in fields: + if user_class in v.get_graphql_type(): + user_field = k + return user_field + + def get_group_field(self) -> str: + """ Acquires the field where the relationship with a group was defined. + + It is required to define the `GROUP_COLLECTION` in env var if a custom + user will be used. 
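To make the dynamic index naming above concrete, this is roughly what get_groups() and permissions() derive (a sketch; the Newuser/Newgroup names match the custom test collections used later in this series):

# Sketch of the index names the dynamic lookup resolves to.
user_class = 'newuser'                 # self.__class__.__name__.lower()
group_class = 'newgroup'               # self.group_class.__name__.lower()
relation_name = 'custom_users_groups'  # relation_name of the groups ManyToManyField

groups_index = f'{user_class}s_{group_class}s_by_{user_class}'
permissions_index = f'{user_class}s_{group_class}s_by_{group_class}_and_{user_class}'
if relation_name:
    groups_index = f'{relation_name}_by_{user_class}'
    permissions_index = f'{relation_name}_by_{group_class}_and_{user_class}'

# Default User/Group (relation_name='users_groups') resolve to
# 'users_groups_by_user' and 'users_groups_by_group_and_user'; the custom pair
# above resolves to 'custom_users_groups_by_newuser' and
# 'custom_users_groups_by_newgroup_and_newuser'.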
This is to ensure the permissions to work properly + + """ + fields = self._base_properties.items() + group_class = env('GROUP_COLLECTION', 'Group') + group_field = None + for k, v in fields: + if group_class in v.get_graphql_type(): + group_field = k + return group_field def get_collection_name(self) -> str: """ diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 1a7944e..c315907 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -171,12 +171,35 @@ def get_lambda(self, resource_type): class GenericAuthorizationRole(Role): - user_table = 'User' - current_user_field = 'user' - name_suffix = 'user_based_crud_role' + + def get_user_collection(self): + user_field = self.collection._base_properties.get("users") + if not user_field: + user_field = self.collection._base_properties.get("user") + return user_field + + def get_user_table(self): + user_field = self.get_user_collection() + if user_field: + return user_field.get_foreign_class().__name__ + else: + return None + + def get_relation_index_name(self): + self.current_user_field = self.collection.__class__.__name__.lower() + self.user_table = self.collection.__class__.__name__ + relation_index_name = (self.get_user_collection().__base_properties.get('groups').relation_name + + '_by_' + + self.collection.group_class.__name__.lower() + + '_' + + self.get_user_table()) + return relation_index_name + + def get_name_suffix(self): + return f'{self.get_user_table().lower()}_based_crud_role' def get_name(self): - return self.name or f"{self.collection.get_class_name()}_{self.name_suffix}" + return self.name or f"{self.collection.get_class_name()}_{self.get_name_suffix()}" def get_privileges(self): priv_list = [ @@ -190,7 +213,7 @@ def get_privileges(self): } }, { - "resource": q.index(self.relation_index_name), + "resource": q.index(self.get_relation_index_name()), "actions": { "read": True } @@ -226,10 +249,23 @@ def get_privileges(self): class GenericUserBasedRole(GenericAuthorizationRole): relation_index_name = 'users_groups_by_user' + def get_relation_index_name(self): + # Acquires the `groups` field from the user collection + user_col = self.get_user_collection().get_foreign_class() + user_groups = user_col._base_properties.get("groups") + + if user_groups: + relation_index_name = (user_groups.relation_name + + '_by_' + + self.get_user_table().lower()) + return relation_index_name + return None + def get_lambda(self, resource_type): + current_user_field = self.collection.get_user_field() if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] - user_ref = q.select(self.current_user_field, + user_ref = q.select(current_user_field, q.select('data', q.var('old_object'))) return q.query( q.lambda_(lambda_args, @@ -239,7 +275,7 @@ def get_lambda(self, resource_type): q.current_identity() ), q.equals( - q.select(self.current_user_field, q.select('data', q.var('new_object'))), + q.select(current_user_field, q.select('data', q.var('new_object'))), q.current_identity() ) ) @@ -248,11 +284,11 @@ def get_lambda(self, resource_type): ) elif resource_type == 'create': lambda_args = ["new_object"] - user_ref = q.select(self.current_user_field, + user_ref = q.select(current_user_field, q.select('data', q.var('new_object'))) elif resource_type == 'read' or resource_type == 'delete': lambda_args = ["object_ref"] - user_ref = q.select(self.current_user_field, + user_ref = q.select(current_user_field, q.select('data', q.get(q.var('object_ref')))) return q.query( @@ 
-273,10 +309,31 @@ class GenericGroupBasedRole(GenericAuthorizationRole): user_table = 'User' name_suffix = 'group_based_crud_role' + def get_name_suffix(self): + """ """ + # TODO: Return `group_based_crud_role` with dynamic group name class + pass + + def get_relation_index_name(self): + user_col = self.get_user_collection().get_foreign_class() + user_groups = user_col._base_properties.get("groups") + + if user_groups: + # TODO: be able to return `_by_` .e.g. `users_groups_by_user` + relation_index_name = (user_groups.relation_name + + '_by_' + + self.collection.group_class.__name__.lower() + + '_' + + self.get_user_table().lower()) + return relation_index_name + return None + def get_lambda(self, resource_type): + current_group_field = self.collection.get_group_field() + print(f'\n\nCURRENT GROUP FIELD: {current_group_field}\n\n') perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': - group_ref = q.select(self.current_group_field, + group_ref = q.select(current_group_field, q.select('data', q.var('old_object'))) lambda_args = ["old_object", "new_object", "object_ref"] @@ -289,7 +346,7 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.get( q.match( - q.index(self.relation_index_name), + q.index(self.get_relation_index_name()), group_ref, q.current_identity() ) @@ -297,18 +354,18 @@ def get_lambda(self, resource_type): perm ), q.equals( - q.select(self.current_group_field, q.select('data', q.var('old_object'))), - q.select(self.current_group_field, q.select('data', q.var('new_object'))), + q.select(current_group_field, q.select('data', q.var('old_object'))), + q.select(current_group_field, q.select('data', q.var('new_object'))), ) ) ) ) elif resource_type == 'create': - group_ref = q.select(self.current_group_field, + group_ref = q.select(current_group_field, q.select('data', q.var('new_object'))) lambda_args = ["new_object"] elif resource_type == 'read' or resource_type == 'delete': - group_ref = q.select(self.current_group_field, + group_ref = q.select(current_group_field, q.select('data', q.get(q.var('object_ref')))) lambda_args = ["object_ref"] @@ -320,7 +377,7 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.select("data", q.get(q.match( - q.index(self.relation_index_name), + q.index(self.get_relation_index_name()), group_ref, q.current_identity() )))))), diff --git a/pfunk/resources.py b/pfunk/resources.py index c31f98e..18b4687 100644 --- a/pfunk/resources.py +++ b/pfunk/resources.py @@ -133,6 +133,10 @@ def get_membership_lambda(self): ) )) + def get_user_table(self): + """ Acquires user table from the class name """ + return None + def get_membership(self) -> dict: """ Returns the membership configuration for the role @@ -141,7 +145,7 @@ def get_membership(self) -> dict: """ membership = self.get_membership_lambda() payload_dict = { - 'resource': q.collection(self.user_table or self.collection.get_collection_name()), + 'resource': q.collection(self.get_user_table() or self.collection.get_collection_name()), } if membership: payload_dict['predicate'] = self.get_membership_lambda() diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py deleted file mode 100644 index b0d3c0f..0000000 --- a/pfunk/tests/test_dev.py +++ /dev/null @@ -1,63 +0,0 @@ -# test_dev.py - a placeholder test for fixing User - Group circular import errors - -import os -from valley.utils import import_util - -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser -from pfunk.testcase import 
APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField - - -class UserGroups(Collection): - collection_name = 'custom_users_groups' - userID = ReferenceField('pfunk.tests.test_dev.Newuser') - groupID = ReferenceField('pfunk.tests.test_dev.Newgroup') - permissions = ListField() - - -class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_dev.Newuser', - relation_name='custom_users_groups') - - -class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_dev.UserGroups') - group_class = import_util('pfunk.tests.test_dev.Newgroup') - groups = ManyToManyField( - 'pfunk.tests.test_dev.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_dev.Blog', - relation_name='users_blogs') - - -class Blog(Collection): - title = StringField(required=True) - content = StringField(required=True) - users = ManyToManyField('pfunk.tests.test_dev.Newuser', - relation_name='users_blogs') - - def __unicode__(self): - return self.title - - -# Test case to see if user-group is working -class TestUserGroupError(APITestCase): - collections = [Newuser, Newgroup, UserGroups, Blog] - - def setUp(self) -> None: - super().setUp() - self.group = Newgroup.create(name='Power Users', slug='power-users') - self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) - self.blog = Blog.create( - title='test_blog', content='test content', user=self.user, token=self.secret) - - # BUG: logging in returns missing identity - print(f'TEST USER: {self.user.__dict__}') - self.token, self.exp = Newuser.api_login("test", "abc123") - print(f'\n\nTOKEN: {self.token}') - print(f'\n\nEXP: {self.exp}') - - def test_mock(self): - assert True diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_user_subclass.py new file mode 100644 index 0000000..43cd47f --- /dev/null +++ b/pfunk/tests/test_user_subclass.py @@ -0,0 +1,113 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole + + +class UserGroups(Collection): + collection_name = 'users_groups' + userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') + groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') + permissions = ListField() + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_user_subclass.UserGroups') + group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog', + 
relation_name='users_blogs') + + +class Blog(Collection): + collection_roles = [GenericUserBasedRole] + title = StringField(required=True) + content = StringField(required=True) + user = ReferenceField('pfunk.tests.test_user_subclass.Newuser', + relation_name='users_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', user=self.user, token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') + # p(f'@Test Setup: User Created: {self.user.__dict__}') + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog.", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) + + def test_update(self): + self.assertNotIn("the updated street somewhere", [ + house.address for house in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog.", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py deleted file mode 100644 index d13268d..0000000 --- a/pfunk/tests/unittest_keys.py +++ /dev/null @@ -1,2 +0,0 @@ - -KEYS = {'06bcf013-403a-481b-b6ea-d00a83b7e464': {'signature_key': 'hYJTHn5rF3GeXARPyJyPL1vJhCF7yr-k1H9mITCH5hA=', 'payload_key': '-GF_6GYvo17Udk7AqtGIityqBXYizkENlxc_PDRODJQ=', 'kid': '06bcf013-403a-481b-b6ea-d00a83b7e464'}, '223e62b9-5686-40cb-9e5f-29eb2709d182': {'signature_key': '0pSOP2OxKberj2-kQdPSZDgDryTIWzAnIo0zU16LBQE=', 'payload_key': 'yk3GPQQomCeZIqk1-DWuXORCQpAA8cJdDI4faq3snTo=', 'kid': '223e62b9-5686-40cb-9e5f-29eb2709d182'}, 'c4545030-eac0-4286-8499-f4d5229d4520': {'signature_key': '1vEP5nxo1bUqkEJYjYOedcDtqDO-BWCTvybia5sbszY=', 'payload_key': 'feMygEXTzYddREZaLsiwKYQQnU8dhY0pxz_xXmQ51aU=', 'kid': 'c4545030-eac0-4286-8499-f4d5229d4520'}, '74d87722-3e42-46fe-ac19-9c24a6a49659': {'signature_key': '1wJp_n09f2lhFDOZY9pwqjHyyKtAZq185hdvbhkr7bg=', 'payload_key': 'nTTCKUoC3wBKhNsAxba65UYvJ2Wow2Lhx1bs95xisIk=', 'kid': 
'74d87722-3e42-46fe-ac19-9c24a6a49659'}, '34875674-7ace-41f9-b04a-fd0b27f8774f': {'signature_key': 'LHBp-r_TTJXSEMeyl2g2bklk4dg0hArkN_QE2nirKts=', 'payload_key': 'I8kBnhtBZ7SWi2C2EtcZHJ48_QT2J4tWMvGlFNb27w0=', 'kid': '34875674-7ace-41f9-b04a-fd0b27f8774f'}, 'de75b5a2-f950-49bc-91e5-03fba00390ae': {'signature_key': 'BDLMhgjmLHkBZwXYuay9x3eB6_4leetdHHMfR7wHi34=', 'payload_key': 'wNUJQyAnueoHv3zYpkqCOE4eECJvW9O4gUWm5JOLakI=', 'kid': 'de75b5a2-f950-49bc-91e5-03fba00390ae'}, 'a8254567-995a-43a7-a79a-e8855c50af51': {'signature_key': 'ArxuXmhYJ41YAM8yQR1uSFyBk5Y5vABBeA103X0PFYI=', 'payload_key': 'ocrhhe1GkdzYQrEPq4ibKd6qHTiEzXXXhpQZZeOKw2k=', 'kid': 'a8254567-995a-43a7-a79a-e8855c50af51'}, 'd6190824-36b6-421b-9c48-ea9a2fd1a48a': {'signature_key': 'GS0gGJnia3bI01w0o4JbD3YMOvLXzzPqOyF4wD1lFwk=', 'payload_key': 'ZXquXtViWDhSGID7Ltufv40x3op-7T8dymOnnN3NAuE=', 'kid': 'd6190824-36b6-421b-9c48-ea9a2fd1a48a'}, '81239a5d-2ead-44ca-803a-a89fc6113b22': {'signature_key': 'MV8caU2wEw9SpudpKVXEVwiyei_2dpr3D4Va40ObsOk=', 'payload_key': 'G5dY4O038k9oKtCq1YPsYp37PZ1RBKLKKX31JXJKHL8=', 'kid': '81239a5d-2ead-44ca-803a-a89fc6113b22'}, 'b136e397-f3e9-4760-af51-270be165fe3a': {'signature_key': 'dKON8KVgtb1p4n2fe7TnXQ3-hlt85JO1uoZDZeVHt0w=', 'payload_key': 'WphxdwSWT-Oxso1MoC9zRnkUyc2zyf5kL6yVooco7ic=', 'kid': 'b136e397-f3e9-4760-af51-270be165fe3a'}} \ No newline at end of file diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py index 9558a0e..6936280 100644 --- a/pfunk/utils/publishing.py +++ b/pfunk/utils/publishing.py @@ -33,15 +33,13 @@ def create_or_update_role(client, payload: dict = {}): Returns: query """ - try: response = client.query( q.create_role(payload) ) except BadRequest as err: - payload_copy = payload.copy() - role_name = payload_copy.pop("name") + role_name = payload_copy.pop("name") response = client.query( q.update( From 1c4b0c8a9269171324e0796611041aa6fabdf338 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 25 Oct 2022 15:39:40 +0800 Subject: [PATCH 098/214] Did refactoring for better readablity in roles. Applied with pep8 --- pfunk/contrib/auth/resources.py | 108 +++++++++++++----------- pfunk/resources.py | 17 ++-- pfunk/tests/test_user_subclass.py | 11 +-- pfunk/tests/test_user_subclass_m2m.py | 114 ++++++++++++++++++++++++++ pfunk/utils/publishing.py | 2 +- 5 files changed, 189 insertions(+), 63 deletions(-) create mode 100644 pfunk/tests/test_user_subclass_m2m.py diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index c315907..3f79774 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -34,10 +34,11 @@ def get_body(self): } ) ), - q.abort("Account is not active. Please check email for activation.") - ) - ) + q.abort( + "Account is not active. 
Please check email for activation.") ) + ) + ) ) @@ -61,9 +62,9 @@ def get_body(self): "credentials": {"password": q.select("new_password", q.var("input"))} }), q.abort("Wrong current password.") - ) - ) - ) + ) + ) + ) class CreateUser(AuthFunction): @@ -71,7 +72,8 @@ def get_body(self): data_dict = { "data": self.collection.get_fields(), "credentials": { - self.collection._credential_field: q.select(self.collection._credential_field, q.var("input")) + self.collection._credential_field: q.select( + self.collection._credential_field, q.var("input")) } } return q.query( @@ -79,7 +81,8 @@ def get_body(self): q.let( { 'result': q.create( - q.collection(self.collection.get_collection_name()), + q.collection( + self.collection.get_collection_name()), data_dict), 'input': q.var('input') }, @@ -90,20 +93,20 @@ def get_body(self): q.lambda_( 'group', q.create( - q.collection(self.collection._base_properties.get('groups').relation_name), + q.collection(self.collection._base_properties.get( + 'groups').relation_name), {'data': { 'userID': q.select('ref', q.var('result')), 'groupID': q.var('group') }} ) - ) - , + ), q.select('groups', q.var('input')) ), q.abort('Groups not defined.') ) - ) - )) + ) + )) class Public(Role): @@ -116,11 +119,11 @@ def get_function_lambda(self): q.lambda_(['data'], q.equals( q.select('account_status', q.select('data', - q.match(q.index('unique_User_username', + q.match(q.index(f'unique_{self.collection.__class__.__name__}_username', q.select('username', q.var('data')))))), "ACTIVE" - ) - )) + ) + )) def get_privileges(self): return [ @@ -173,30 +176,32 @@ def get_lambda(self, resource_type): class GenericAuthorizationRole(Role): def get_user_collection(self): - user_field = self.collection._base_properties.get("users") - if not user_field: - user_field = self.collection._base_properties.get("user") - return user_field - - def get_user_table(self): - user_field = self.get_user_collection() - if user_field: - return user_field.get_foreign_class().__name__ + """ Acquires User collection type """ + user_field = self.collection.get_user_field().lower() + col = self.collection._base_properties.get(user_field) + if col: + return col.get_foreign_class() else: return None + def get_user_table(self): + """ Acquires User's class name """ + col = self.get_user_collection() + if col: + return col.__name__ + return None + def get_relation_index_name(self): - self.current_user_field = self.collection.__class__.__name__.lower() - self.user_table = self.collection.__class__.__name__ - relation_index_name = (self.get_user_collection().__base_properties.get('groups').relation_name - + '_by_' - + self.collection.group_class.__name__.lower() - + '_' - + self.get_user_table()) + user_col = self.get_user_collection() + user_groups = user_col._base_properties.get("groups") + self.user_table = self.get_user_table().lower() + relation_index_name = (user_groups.relation_name + + '_by_' + + self.user_table) return relation_index_name def get_name_suffix(self): - return f'{self.get_user_table().lower()}_based_crud_role' + return f'{self.collection.get_user_field().lower()}_based_crud_role' def get_name(self): return self.name or f"{self.collection.get_class_name()}_{self.get_name_suffix()}" @@ -247,17 +252,20 @@ def get_privileges(self): class GenericUserBasedRole(GenericAuthorizationRole): - relation_index_name = 'users_groups_by_user' def get_relation_index_name(self): + """ Returns the user-group by user index name + + Formatted as: {user_group_relation_name}_by_{user_class} + """ # Acquires the 
`groups` field from the user collection - user_col = self.get_user_collection().get_foreign_class() + user_col = self.get_user_collection() user_groups = user_col._base_properties.get("groups") if user_groups: relation_index_name = (user_groups.relation_name - + '_by_' - + self.get_user_table().lower()) + + '_by_' + + self.get_user_table().lower()) return relation_index_name return None @@ -275,7 +283,8 @@ def get_lambda(self, resource_type): q.current_identity() ), q.equals( - q.select(current_user_field, q.select('data', q.var('new_object'))), + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), q.current_identity() ) ) @@ -317,20 +326,19 @@ def get_name_suffix(self): def get_relation_index_name(self): user_col = self.get_user_collection().get_foreign_class() user_groups = user_col._base_properties.get("groups") - + if user_groups: # TODO: be able to return `_by_` .e.g. `users_groups_by_user` relation_index_name = (user_groups.relation_name - + '_by_' - + self.collection.group_class.__name__.lower() - + '_' - + self.get_user_table().lower()) + + '_by_' + + self.collection.group_class.__name__.lower() + + '_' + + self.get_user_table().lower()) return relation_index_name return None def get_lambda(self, resource_type): current_group_field = self.collection.get_group_field() - print(f'\n\nCURRENT GROUP FIELD: {current_group_field}\n\n') perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, @@ -346,7 +354,8 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.get( q.match( - q.index(self.get_relation_index_name()), + q.index( + self.get_relation_index_name()), group_ref, q.current_identity() ) @@ -354,8 +363,10 @@ def get_lambda(self, resource_type): perm ), q.equals( - q.select(current_group_field, q.select('data', q.var('old_object'))), - q.select(current_group_field, q.select('data', q.var('new_object'))), + q.select(current_group_field, q.select( + 'data', q.var('old_object'))), + q.select(current_group_field, q.select( + 'data', q.var('new_object'))), ) ) ) @@ -377,7 +388,8 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.select("data", q.get(q.match( - q.index(self.get_relation_index_name()), + q.index( + self.get_relation_index_name()), group_ref, q.current_identity() )))))), diff --git a/pfunk/resources.py b/pfunk/resources.py index 18b4687..74786e2 100644 --- a/pfunk/resources.py +++ b/pfunk/resources.py @@ -128,10 +128,11 @@ def get_membership_lambda(self): return q.query( q.lambda_(['object_ref'], q.equals( - q.select('account_status', q.select('data', q.get(q.var('object_ref')))), + q.select('account_status', q.select( + 'data', q.get(q.var('object_ref')))), "ACTIVE" - ) - )) + ) + )) def get_user_table(self): """ Acquires user table from the class name """ @@ -175,7 +176,8 @@ class Index(object): serialized: bool = None terms: list = None values: list = None - _accept_kwargs: list = ['name', 'source', 'unique', 'serialized', 'terms', 'values'] + _accept_kwargs: list = ['name', 'source', + 'unique', 'serialized', 'terms', 'values'] def __init__(self, **kwargs): """ @@ -248,9 +250,10 @@ def get_body(self): q.get(q.var('ref')) ), q.paginate( - q.match(q.index(self.collection.all_index_name())), + q.match( + q.index(self.collection.all_index_name())), q.select('size', q.var('input')) ) - ) - ) + ) + ) ) diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_user_subclass.py index 43cd47f..ae37c1f 100644 --- 
a/pfunk/tests/test_user_subclass.py +++ b/pfunk/tests/test_user_subclass.py @@ -37,7 +37,7 @@ class Blog(Collection): title = StringField(required=True) content = StringField(required=True) user = ReferenceField('pfunk.tests.test_user_subclass.Newuser', - relation_name='users_blogs') + relation_name='users_blogs') def __unicode__(self): return self.title @@ -56,9 +56,6 @@ def setUp(self) -> None: self.blog = Blog.create( title='test_blog', content='test content', user=self.user, token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") - # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') - # p(f'@Test Setup: User Created: {self.user.__dict__}') - def test_read(self): res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', @@ -93,9 +90,9 @@ def test_update(self): house.address for house in Blog.all()]) res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', json={ - "title": "updated blog", - "content": "I updated my blog.", - "user": self.user.ref.id()}, + "title": "updated blog", + "content": "I updated my blog.", + "user": self.user.ref.id()}, headers={ "Authorization": self.token}) diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py new file mode 100644 index 0000000..6fff9c3 --- /dev/null +++ b/pfunk/tests/test_user_subclass_m2m.py @@ -0,0 +1,114 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole + + +class UserGroups(Collection): + collection_name = 'users_groups' + userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') + groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') + permissions = ListField() + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_user_subclass.UserGroups') + group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog', + relation_name='users_blogs') + + +class Blog(Collection): + collection_roles = [GenericUserBasedRole] + title = StringField(required=True) + content = StringField(required=True) + users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + relation_name='users_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', users=[self.user], 
token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') + # p(f'@Test Setup: User Created: {self.user.__dict__}') + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("new blog", [ + # blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.status_code, 200) diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py index 6936280..f98efe4 100644 --- a/pfunk/utils/publishing.py +++ b/pfunk/utils/publishing.py @@ -39,7 +39,7 @@ def create_or_update_role(client, payload: dict = {}): ) except BadRequest as err: payload_copy = payload.copy() - role_name = payload_copy.pop("name") + role_name = payload_copy.pop("name") response = client.query( q.update( From 831ee9985ef091071798122a13e351f1752354a2 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 26 Oct 2022 15:50:44 +0800 Subject: [PATCH 099/214] Fixed GroupBasedPermission to properly use functions and dynamic sytaxes --- pfunk/collection.py | 2 ++ pfunk/contrib/auth/resources.py | 40 +++++++++++++++++++-------- pfunk/tests/test_user_subclass_m2m.py | 1 + 3 files changed, 32 insertions(+), 11 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 233caf3..3af0c40 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -137,6 +137,7 @@ def get_user_field(self) -> str: for k, v in fields: if user_class in v.get_graphql_type(): user_field = k + break return user_field def get_group_field(self) -> str: @@ -152,6 +153,7 @@ def get_group_field(self) -> str: for k, v in fields: if group_class in v.get_graphql_type(): group_field = k + break return group_field def get_collection_name(self) -> str: diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 3f79774..f745cc1 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,3 +1,4 @@ +from tokenize import group from pfunk.client import q from pfunk.resources import Function, Role @@ -184,6 +185,17 @@ def get_user_collection(self): else: return None + def get_group_collection(self): + """ Acquires Group 
collection type from User's fields """ + user_col = self.get_user_collection() + col = user_col() + group_field = col.get_group_field() + user_groups = user_col._base_properties.get(group_field) + if user_groups: + return user_groups.get_foreign_class() + else: + return None + def get_user_table(self): """ Acquires User's class name """ col = self.get_user_collection() @@ -191,9 +203,17 @@ def get_user_table(self): return col.__name__ return None + def get_group_table(self): + """ Acquires group class name from the user's fields """ + group_col = self.get_group_collection() + if group_col: + return group_col.__name__ + return None + def get_relation_index_name(self): user_col = self.get_user_collection() - user_groups = user_col._base_properties.get("groups") + group_field = user_col.get_group_field() + user_groups = user_col._base_properties.get(group_field) self.user_table = self.get_user_table().lower() relation_index_name = (user_groups.relation_name + '_by_' @@ -319,26 +339,24 @@ class GenericGroupBasedRole(GenericAuthorizationRole): name_suffix = 'group_based_crud_role' def get_name_suffix(self): - """ """ - # TODO: Return `group_based_crud_role` with dynamic group name class - pass + return f'{self.get_group_table().lower()}_based_crud_role' def get_relation_index_name(self): - user_col = self.get_user_collection().get_foreign_class() + """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ + user_col = self.get_user_collection() user_groups = user_col._base_properties.get("groups") - - if user_groups: - # TODO: be able to return `_by_` .e.g. `users_groups_by_user` + group_table = self.get_group_table().lower() + if group_table: relation_index_name = (user_groups.relation_name + '_by_' - + self.collection.group_class.__name__.lower() - + '_' + + group_table + + '_and_' + self.get_user_table().lower()) return relation_index_name return None def get_lambda(self, resource_type): - current_group_field = self.collection.get_group_field() + current_group_field = self.get_group_table() perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py index 6fff9c3..8516db2 100644 --- a/pfunk/tests/test_user_subclass_m2m.py +++ b/pfunk/tests/test_user_subclass_m2m.py @@ -53,6 +53,7 @@ def setUp(self) -> None: self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) + p(f'@Test Setup: User Created: {self.user.__dict__}') self.blog = Blog.create( title='test_blog', content='test content', users=[self.user], token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") From 01833c393e487ca53db25a5438dcb08007850834 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 28 Oct 2022 13:59:15 +0800 Subject: [PATCH 100/214] fixed permission adding group based perm --- pfunk/contrib/auth/collections.py | 104 +++++++++++++----------- pfunk/contrib/auth/resources.py | 105 ++++++++++++++++++++---- pfunk/tests/test_group_subclass.py | 110 ++++++++++++++++++++++++++ pfunk/tests/test_user_subclass_m2m.py | 11 +-- 4 files changed, 265 insertions(+), 65 deletions(-) create mode 100644 pfunk/tests/test_group_subclass.py diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 254df59..3b78916 100644 --- 
a/pfunk/contrib/auth/collections.py
+++ b/pfunk/contrib/auth/collections.py
@@ -9,6 +9,7 @@
 from pfunk import ReferenceField
 from pfunk.client import q
 from pfunk.collection import Collection, Enum
+from pfunk.resources import Index
 from pfunk.contrib.auth.key import Key
 from pfunk.contrib.auth.resources import LoginUser, UpdatePassword, Public, UserRole, LogoutUser
 from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, \
@@ -29,6 +30,50 @@ def __unicode__(self):
         return self.name  # pragma: no cover
 
 
+class UserGroupByUserAndGroupIndex(Index):
+    name = 'usergroups_by_userID_and_groupID'
+    source = 'Usergroups'
+    terms = [
+        {'field': ['data', 'userID']},
+        {'field': ['data', 'groupID']}
+    ]
+    values = [
+        {'field': ['ref']}
+    ]
+
+
+class UserGroups(Collection):
+    """ Many-to-many collection of the user-group relationship
+
+    The native Fauna way of holding a many-to-many relationship
+    is to only have the IDs of the two objects. Here in pfunk, we
+    leverage the flexibility of the collection to add another
+    field, `permissions`, which holds the capabilities of a user,
+    allowing us to add easier permission handling instead of
+    manually going to roles and adding individual collections,
+    which can be painful in the long term.
+
+    Attributes:
+        collection_name (str):
+            Name of the collection in Fauna
+        userID (str):
+            Fauna ref of the user that is tied to the group
+        groupID (str):
+            Fauna ref of the group that is tied to the user
+        permissions (str[]):
+            List of permissions, `['create', 'read', 'delete', 'write']`
+    """
+    collection_indexes = [UserGroupByUserAndGroupIndex]
+    userID = ReferenceField(
+        env('USER_COLLECTION_DIR', 'pfunk.contrib.auth.collections.User'))
+    groupID = ReferenceField(
+        env('GROUP_COLLECTION_DIR', 'pfunk.contrib.auth.collections.Group'))
+    permissions = ListField()
+
+    def __unicode__(self):
+        return f"{self.userID}, {self.groupID}, {self.permissions}"
+
+
 AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE'])
 
 
@@ -273,6 +318,7 @@ class ExtendedUser(BaseUser):
     Provides base methods for group-user permissions. 
If there are no supplied `groups` property, will raise `NotImplementedErrror` """ + user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') @classmethod def get_permissions(cls, ref, _token=None): @@ -290,7 +336,7 @@ def get_groups(self, _token=None): index_name = f'{relation_name}_by_{user_class}' return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( - q.paginate(q.match(index_name, self.ref)) + q.paginate(q.match(index_name, self.ref)) ).get('data')] def permissions(self, _token=None): @@ -307,21 +353,18 @@ def permissions(self, _token=None): perm_list (str[]): Permissions of the user in list: `['create', 'read', 'delete', 'write']` """ - user_class = self.__class__.__name__.lower() - group_class = self.group_class.__name__.lower() - relation_name = self._base_properties.get("groups").relation_name - index_name = f'{user_class}s_{group_class}s_by_{group_class}_and_{user_class}' - if relation_name: - index_name = f'{relation_name}_by_{group_class}_and_{user_class}' + + index_name = 'usergroups_by_userID_and_groupID' perm_list = [] for i in self.get_groups(_token=_token): ug = self.user_group_class.get_index(index_name, [ - i.ref, self.ref], _token=_token) + self.ref, i.ref], _token=_token) for user_group in ug: + print(f'\n\n@contrib auth: USER GROUP: {user_group}\n\n') p = [] if isinstance(user_group.permissions, list): p = [ - f'{user_group.groupID.slug}-{i}' for i in user_group.permissions] + f'{user_group.groupID}-{i}' for i in user_group.permissions] perm_list.extend(p) return perm_list @@ -347,54 +390,24 @@ def add_permissions(self, group, permissions: list, _token=None): of the user """ perm_list = [] - for i in permissions: - perm_list.extend(i.permissions) + index_name = 'usergroups_by_userID_and_groupID' + for i in permissions: + perm_list.append(i) if not self.user_group_class: raise NotImplementedError try: user_group = self.user_group_class.get_by( - 'users_groups_by_group_and_user', terms=[group.ref, self.ref]) + index_name, terms=[self.ref, group.ref]) except DocNotFound: user_group = self.user_group_class.create( - userID=self.ref, groupID=group.ref, permissions=perm_list) + userID=self, groupID=group, permissions=perm_list, _token=_token) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() - return user_group - - -class UserGroups(Collection): - """ Many-to-many collection of the user-group relationship - The native fauna-way of holding many-to-many relationship - is to only have the ID of the 2 object. Here in pfunk, we - leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities - of a user, allowing us to add easier permission handling. - Instead of manually going to roles and adding individual - collections which can be painful in long term. 
- - Attributes: - collection_name (str): - Name of the collection in Fauna - userID (str): - Fauna ref of user that is tied to the group - groupID (str): - Fauna ref of a collection that is tied with the user - permissions (str[]): - List of permissions, `['create', 'read', 'delete', 'write']` - """ - collection_name = 'users_groups' - userID = ReferenceField( - env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) - groupID = ReferenceField( - env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" + return user_group class Group(BaseGroup): @@ -405,7 +418,6 @@ class Group(BaseGroup): class User(ExtendedUser): """ A default user that already has predefined M2M relationship with `pfunk.contrib.auth.collections.Group` """ - user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') group_class = import_util('pfunk.contrib.auth.collections.Group') groups = ManyToManyField( 'pfunk.contrib.auth.collections.Group', 'users_groups') diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index f745cc1..1cd16ad 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,7 +1,12 @@ from tokenize import group +from envs import env + from pfunk.client import q from pfunk.resources import Function, Role +# Global collections +USER_CLASS = env('USER_COLLECTION', 'User') +GROUP_CLASS = env('GROUP_COLLECTION', 'Group') class AuthFunction(Function): @@ -178,7 +183,9 @@ class GenericAuthorizationRole(Role): def get_user_collection(self): """ Acquires User collection type """ - user_field = self.collection.get_user_field().lower() + user_field = self.collection.get_user_field() + if user_field: + user_field = user_field.lower() col = self.collection._base_properties.get(user_field) if col: return col.get_foreign_class() @@ -272,6 +279,7 @@ def get_privileges(self): class GenericUserBasedRole(GenericAuthorizationRole): + """ Generic set of permissions for entity to user relationship """ def get_relation_index_name(self): """ Returns the user-group by user index name @@ -331,32 +339,37 @@ def get_lambda(self, resource_type): class GenericGroupBasedRole(GenericAuthorizationRole): - relation_index_name = 'users_groups_by_group_and_user' - through_user_field = 'userID' - current_group_field = 'group' permissions_field = 'permissions' - user_table = 'User' - name_suffix = 'group_based_crud_role' + user_table = USER_CLASS + group_table = GROUP_CLASS + through_user_field = USER_CLASS.lower() + 'ID' def get_name_suffix(self): - return f'{self.get_group_table().lower()}_based_crud_role' + return f'{self.group_table.lower()}_based_crud_role' def get_relation_index_name(self): """ Returns the index name of the m2m index of group and user e.g. 
'users_groups_by_group_and_user' """ - user_col = self.get_user_collection() - user_groups = user_col._base_properties.get("groups") - group_table = self.get_group_table().lower() - if group_table: + group_field = self.collection.get_group_field() + group_col = self.collection._base_properties.get(group_field).get_foreign_class() + group_user_field = group_col().get_user_field() + user_groups = group_col._base_properties.get(group_user_field) + if self.group_table: relation_index_name = (user_groups.relation_name + '_by_' - + group_table + + self.group_table.lower() + '_and_' - + self.get_user_table().lower()) + + self.user_table.lower()) return relation_index_name return None def get_lambda(self, resource_type): - current_group_field = self.get_group_table() + """ Returns the lambda function for giving the permission to Group-based entities + + Allows modification if: + 1. You belong to the group that owns the document + 2. You have the create permission to perform the action (create, read, write, and delete) + """ + current_group_field = self.collection.get_group_field().lower() perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, @@ -415,3 +428,67 @@ def get_lambda(self, resource_type): ) ) ) + + +# class GenericUserBasedRoleM2M(GenericAuthorizationRole): +# """ Generic set of permissions for many-to-many entity to user relationship """ + +# def get_name_suffix(self): +# # TODO: return suffix: +# return f'{self.get_group_table().lower()}_based_crud_role' + +# def get_relation_index_name(self): +# # TODO: return index name: `users_blogs_by_blog_and_newuser` +# """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ +# user_col = self.get_user_collection() +# user_groups = user_col._base_properties.get("groups") +# group_table = self.get_group_table().lower() +# if group_table: +# relation_index_name = (user_groups.relation_name +# + '_by_' +# + group_table +# + '_and_' +# + self.get_user_table().lower()) +# return relation_index_name +# return None + +# def get_lambda(self, resource_type): +# # TODO: refactor to look for the M2M index and see if the user has permission for the entity +# current_user_field = self.collection.get_user_field() +# if resource_type == 'write': +# lambda_args = ["old_object", "new_object", "object_ref"] +# user_ref = q.select(current_user_field, +# q.select('data', q.var('old_object'))) +# return q.query( +# q.lambda_(lambda_args, +# q.and_( +# q.equals( +# user_ref, +# q.current_identity() +# ), +# q.equals( +# q.select(current_user_field, q.select( +# 'data', q.var('new_object'))), +# q.current_identity() +# ) +# ) + +# ) +# ) +# elif resource_type == 'create': +# lambda_args = ["new_object"] +# user_ref = q.select(current_user_field, +# q.select('data', q.var('new_object'))) +# elif resource_type == 'read' or resource_type == 'delete': +# lambda_args = ["object_ref"] +# user_ref = q.select(current_user_field, +# q.select('data', q.get(q.var('object_ref')))) + +# return q.query( +# q.lambda_(lambda_args, +# q.equals( +# user_ref, +# q.current_identity() +# ) +# ) +# ) diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_group_subclass.py new file mode 100644 index 0000000..45a5394 --- /dev/null +++ b/pfunk/tests/test_group_subclass.py @@ -0,0 +1,110 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import 
pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_group_subclass.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + group_class = import_util('pfunk.tests.test_group_subclass.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_group_subclass.Newgroup', relation_name='custom_users_groups') + + +class Blog(Collection): + collection_roles = [GenericGroupBasedRole] + title = StringField(required=True) + content = StringField(required=True) + group = ReferenceField('pfunk.tests.test_group_subclass.Newgroup', + relation_name='newgroup_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + print(f'\n\nALL INDEXES: {self.project.indexes}\n\n') + perms = self.user.add_permissions(self.group, ['create', 'read', 'write', 'delete']) + + p(f'\n\nest setup: Added User permissions: {perms}\n\n') + p(f'@test setup: User permissions: {self.user.permissions()}') + p(f'@Test Setup: User Created: {self.user.__dict__}') + self.blog = Blog.create( + title='test_blog', content='test content', group=self.group, token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') + # p(f'@Test Setup: User Created: {self.user.__dict__}') + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("new blog", [ + # blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = 
self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/',
+    #                          headers={
+    #                              "Authorization": self.token,
+    #                              "Content-Type": "application/json"
+    #                          })
+
+    #     self.assertTrue(res.status_code, 200)
diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py
index 8516db2..a3d4972 100644
--- a/pfunk/tests/test_user_subclass_m2m.py
+++ b/pfunk/tests/test_user_subclass_m2m.py
@@ -21,6 +21,9 @@ class UserGroups(Collection):
 class Newgroup(BaseGroup):
     users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser',
                             relation_name='custom_users_groups')
+    blogs = ReferenceField('pfunk.tests.test_user_subclass.Blog',
+                           relation_name='newgroup_blogs')
+
 
 
 class Newuser(ExtendedUser):
@@ -28,16 +31,14 @@ class Newuser(ExtendedUser):
     group_class = import_util('pfunk.tests.test_user_subclass.Newgroup')
     groups = ManyToManyField(
         'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups')
-    blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog',
-                            relation_name='users_blogs')
 
 
 class Blog(Collection):
     collection_roles = [GenericUserBasedRole]
     title = StringField(required=True)
     content = StringField(required=True)
-    users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser',
-                            relation_name='users_blogs')
+    group = ReferenceField('pfunk.tests.test_user_subclass.Newgroup',
+                           relation_name='newgroup_blogs')
 
     def __unicode__(self):
         return self.title
@@ -55,7 +56,7 @@ def setUp(self) -> None:
                                    groups=[self.group])
         p(f'@Test Setup: User Created: {self.user.__dict__}')
         self.blog = Blog.create(
-            title='test_blog', content='test content', users=[self.user], token=self.secret)
+            title='test_blog', content='test content', group=[self.group], token=self.secret)
         self.token, self.exp = Newuser.api_login("test", "abc123")
         # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n')
         # p(f'@Test Setup: User Created: {self.user.__dict__}')

From cf0b9d619e83b6d71bee1a8c864222838005c30b Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Wed, 2 Nov 2022 15:11:31 +0800
Subject: [PATCH 101/214] Added priority publishing of the User, Group, and
 UserGroups collections to avoid undefined index and permission errors during
 publish

---
 pfunk/project.py | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/pfunk/project.py b/pfunk/project.py
index 34cc745..54a6469 100644
--- a/pfunk/project.py
+++ b/pfunk/project.py
@@ -17,6 +17,7 @@
 from pfunk.web.request import HTTPRequest, RESTRequest, WSGIRequest
 from pfunk.web.response import HttpNotFoundResponse, JSONMethodNotAllowedResponse
 
+from .contrib.auth.collections import User, Group, UserGroups, BaseGroup, BaseUser, ExtendedUser
 from .collection import Collection
 from .fields import ForeignList
 from .template import graphql_template
@@ -217,7 +218,19 @@ def publish(self, mode: str = 'merge') -> int:
             print('----------------------------------------')
             print(resp.content)
             return
-        for col in set(self.collections):
+
+        collections = set(self.collections)
+        # make publishing prioritize User, Group and UserGroups
+        for col in collections.copy():
+            if (issubclass(col, User)
+                    or issubclass(col, Group)
+                    or issubclass(col, BaseGroup)
+                    or issubclass(col, ExtendedUser)
+                    or issubclass(col, BaseUser)
+                    or issubclass(col, UserGroups)):
+                col.publish()
+                collections.remove(col)
+        for col in collections:
             col.publish()
         return resp.status_code

From f298c8d4c539e49cceed5ad5d27680c3e8a20238 Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Mon, 7 Nov 2022 10:24:20 +0800
Subject: [PATCH 102/214] First stab at generic group
based refactors --- pfunk/contrib/auth/collections.py | 5 ++-- pfunk/contrib/auth/resources.py | 42 ++++++++++-------------------- pfunk/tests/test_group_subclass.py | 10 +++---- 3 files changed, 21 insertions(+), 36 deletions(-) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 3b78916..ba93f10 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -360,11 +360,10 @@ def permissions(self, _token=None): ug = self.user_group_class.get_index(index_name, [ self.ref, i.ref], _token=_token) for user_group in ug: - print(f'\n\n@contrib auth: USER GROUP: {user_group}\n\n') p = [] if isinstance(user_group.permissions, list): p = [ - f'{user_group.groupID}-{i}' for i in user_group.permissions] + f'{user_group.groupID.slug}-{i}' for i in user_group.permissions] perm_list.extend(p) return perm_list @@ -402,7 +401,7 @@ def add_permissions(self, group, permissions: list, _token=None): index_name, terms=[self.ref, group.ref]) except DocNotFound: user_group = self.user_group_class.create( - userID=self, groupID=group, permissions=perm_list, _token=_token) + userID=self.ref, groupID=group.ref, permissions=perm_list, _token=_token) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 1cd16ad..6240904 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -218,14 +218,8 @@ def get_group_table(self): return None def get_relation_index_name(self): - user_col = self.get_user_collection() - group_field = user_col.get_group_field() - user_groups = user_col._base_properties.get(group_field) - self.user_table = self.get_user_table().lower() - relation_index_name = (user_groups.relation_name - + '_by_' - + self.user_table) - return relation_index_name + """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ + return 'usergroups_by_userID_and_groupID' def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' @@ -347,21 +341,6 @@ class GenericGroupBasedRole(GenericAuthorizationRole): def get_name_suffix(self): return f'{self.group_table.lower()}_based_crud_role' - def get_relation_index_name(self): - """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ - group_field = self.collection.get_group_field() - group_col = self.collection._base_properties.get(group_field).get_foreign_class() - group_user_field = group_col().get_user_field() - user_groups = group_col._base_properties.get(group_user_field) - if self.group_table: - relation_index_name = (user_groups.relation_name - + '_by_' - + self.group_table.lower() - + '_and_' - + self.user_table.lower()) - return relation_index_name - return None - def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -370,7 +349,10 @@ def get_lambda(self, resource_type): 2. You have the create permission to perform the action (create, read, write, and delete) """ current_group_field = self.collection.get_group_field().lower() + # group_slug = self.collection. 
+ # TODO: perm won't match with the entity that is being queried perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() + if resource_type == 'write': group_ref = q.select(current_group_field, q.select('data', q.var('old_object'))) @@ -383,14 +365,15 @@ def get_lambda(self, resource_type): # User ID from index q.select(0, q.filter_(lambda i: q.equals(perm, i), q.select(self.permissions_field, + q.select("data", q.get( q.match( q.index( self.get_relation_index_name()), - group_ref, - q.current_identity() + q.current_identity(), + group_ref ) - )))), + ))))), perm ), q.equals( @@ -415,14 +398,17 @@ def get_lambda(self, resource_type): q.lambda_( lambda_args, q.equals( + # NOTE: After acquiring the instance of `UserGroup`, filter the result: permission field + # that matches the `perm` variable AND then see if that is equals to `perm` var + # IMPORTANT: by using this, it will easily filter permissions available, and if there were none, then it is automatically false q.select(0, q.filter_(lambda i: q.equals(perm, i), q.select(self.permissions_field, q.select("data", q.get(q.match( q.index( self.get_relation_index_name()), - group_ref, - q.current_identity() + q.current_identity(), + group_ref )))))), perm ) diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_group_subclass.py index 45a5394..a0792cd 100644 --- a/pfunk/tests/test_group_subclass.py +++ b/pfunk/tests/test_group_subclass.py @@ -27,7 +27,7 @@ class Blog(Collection): title = StringField(required=True) content = StringField(required=True) group = ReferenceField('pfunk.tests.test_group_subclass.Newgroup', - relation_name='newgroup_blogs') + relation_name='newgroup_blogs') def __unicode__(self): return self.title @@ -44,18 +44,18 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) print(f'\n\nALL INDEXES: {self.project.indexes}\n\n') - perms = self.user.add_permissions(self.group, ['create', 'read', 'write', 'delete']) - + perms = self.user.add_permissions( + self.group, ['create', 'read', 'write', 'delete']) + p(f'\n\nest setup: Added User permissions: {perms}\n\n') p(f'@test setup: User permissions: {self.user.permissions()}') p(f'@Test Setup: User Created: {self.user.__dict__}') self.blog = Blog.create( - title='test_blog', content='test content', group=self.group, token=self.secret) + title='test_blog', content='test content', group=self.group) self.token, self.exp = Newuser.api_login("test", "abc123") # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') # p(f'@Test Setup: User Created: {self.user.__dict__}') - def test_read(self): res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', headers={ From 4ed00d5ad2c313231675da96037748da5d372a23 Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Fri, 18 Nov 2022 01:00:20 -0500 Subject: [PATCH 103/214] updated requirements, tests, html views, and templates --- docker-compose.yaml | 2 +- pfunk/collection.py | 8 +- pfunk/{web => }/forms/__init__.py | 0 pfunk/{web => }/forms/collections.py | 20 +- pfunk/forms/fields.py | 166 ++++++++ pfunk/forms/form.py | 43 ++ pfunk/{web => }/forms/templates/forms/ul.html | 0 pfunk/project.py | 1 + pfunk/testcase.py | 10 +- pfunk/tests/templates/house/create.html | 12 + pfunk/tests/templates/house/delete.html | 15 + pfunk/tests/templates/house/detail.html | 13 + pfunk/tests/test_dev.py | 100 ++--- pfunk/tests/test_web_crud.py | 48 ++- pfunk/utils/routing.py | 48 +++ pfunk/utils/swagger.py | 8 +- pfunk/utils/templates.py | 1 + pfunk/web/request.py | 
8 + pfunk/web/response.py | 12 + pfunk/web/views/base.py | 26 +- pfunk/web/views/html.py | 173 +++++--- poetry.lock | 393 ++++++++++++------ pyproject.toml | 2 +- 23 files changed, 838 insertions(+), 271 deletions(-) rename pfunk/{web => }/forms/__init__.py (100%) rename pfunk/{web => }/forms/collections.py (84%) create mode 100644 pfunk/forms/fields.py create mode 100644 pfunk/forms/form.py rename pfunk/{web => }/forms/templates/forms/ul.html (100%) create mode 100644 pfunk/tests/templates/house/create.html create mode 100644 pfunk/tests/templates/house/delete.html create mode 100644 pfunk/tests/templates/house/detail.html create mode 100644 pfunk/utils/routing.py diff --git a/docker-compose.yaml b/docker-compose.yaml index 94e7299..daadf03 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -22,7 +22,7 @@ services: fauna: restart: always - image: fauna/faunadb + image: fauna/faunadb:4.15.0 ports: - 8443:8443 - 8444:8444 diff --git a/pfunk/collection.py b/pfunk/collection.py index 8cb1b64..1e2c78a 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -438,7 +438,7 @@ def save(self, _credentials=None, _token=None) -> None: except BadRequest as e: if 'instance not unique' in [i.code for i in e.errors]: raise NotUniqueError(f"{self.get_collection_name()} document is not unique.") - + print(e.errors) self.ref = resp['ref'] self.call_signals('post_create_signals') else: @@ -591,8 +591,12 @@ def delete_from_id(cls, id: str, _token=None) -> None: # JSON # ######## - def to_dict(self): + def to_dict(self, flat=False): field_data = self._data.copy() + if flat: + for k, v in field_data.items(): + if isinstance(v, Collection): + field_data[k] = v.ref.id() ref = {'id': self.ref.id(), 'collection': self.ref.collection().id()} obj = { 'ref': ref, diff --git a/pfunk/web/forms/__init__.py b/pfunk/forms/__init__.py similarity index 100% rename from pfunk/web/forms/__init__.py rename to pfunk/forms/__init__.py diff --git a/pfunk/web/forms/collections.py b/pfunk/forms/collections.py similarity index 84% rename from pfunk/web/forms/collections.py rename to pfunk/forms/collections.py index 1f06704..6c39c17 100644 --- a/pfunk/web/forms/collections.py +++ b/pfunk/forms/collections.py @@ -1,17 +1,20 @@ -from formy import Form from valley.utils import import_util +from pfunk.forms.form import Form + class CollectionForm(Form): _template = 'forms/ul.html' def __init__(self, **kwargs): - super(CollectionForm, self).__init__(**kwargs) - self._instance = kwargs.get('_instance') + try: + self._instance = kwargs.pop('_instance') + except KeyError: + self._instance = None if self._instance: - self._data = self._instance.to_dict().get('data') + self._data = self._instance.to_dict(flat=True).get('data') self.create_fields() - + super(CollectionForm, self).__init__(**kwargs) @classmethod def add_field_choices(cls, class_name, field): @@ -45,8 +48,11 @@ def add_field(self, name, field): class_name, field) if field.default_value: field_kwargs['default_value'] = field.default - if self._data.get(name): - field_kwargs['value'] = self._data.get(name) + try: + if self._data.get(name): + field_kwargs['value'] = self._data.get(name) + except AttributeError: + pass self._base_properties[name] = field_class(**field_kwargs) def create_fields(self): diff --git a/pfunk/forms/fields.py b/pfunk/forms/fields.py new file mode 100644 index 0000000..ab4b6f6 --- /dev/null +++ b/pfunk/forms/fields.py @@ -0,0 +1,166 @@ +from valley.mixins import * +from valley.properties import BaseProperty as VBaseProperty + +from 
pfunk.utils.templates import temp_env + + +class BaseField(VBaseProperty): + template = 'formy/fields/base.html' + static_assets = tuple() + css_classes = '' + value = None + name = None + input_type = 'text' + + def __init__( + self, + default_value=None, + required=False, + validators=[], + verbose_name=None, + css_classes=None, + placeholder=None, + help_text=None, + static_assets=None, + template=None, + **kwargs + ): + super(BaseField, self).__init__(default_value=default_value, + required=required, + validators=validators, + verbose_name=verbose_name, + **kwargs) + self.default_value = default_value + self.required = required + self.kwargs = kwargs + self.template = template or self.template + self.static_assets = static_assets or self.static_assets + self.css_classes = css_classes or self.css_classes + self.verbose_name = verbose_name + self.placeholder = placeholder or self.verbose_name + self.help_text = help_text + self.validators = list() + self.get_validators() + self.validators = set(self.validators) + + def get_verbose_name(self): + return self.verbose_name or self.name.replace('_', ' ').title() + + def render(self, name=None, value=None, css_classes=None, input_type=None, + placeholder=None, choices=None, errors=dict()): + name = name or self.name + verbose_name = self.verbose_name or name.replace('_', ' ').title() + value = value or self.value + choices = choices or self.choices + input_type = input_type or self.input_type + placeholder = placeholder or self.placeholder or verbose_name + error = errors.get(name) + if css_classes and self.css_classes: + css_classes = '{},{}'.format(self.css_classes, css_classes) + elif not css_classes: + css_classes = self.css_classes + + return temp_env.get_template(self.template).render( + name=name, + error=error, + choices=choices, + value=value, + verbose_name=verbose_name, + placeholder=placeholder, + css_classes=css_classes, + input_type=input_type, + ) + + +class StringField(CharVariableMixin, BaseField): + pass + + +class SlugField(SlugVariableMixin, BaseField): + pass + + +class EmailField(EmailVariableMixin, BaseField): + input_type = 'email' + + +class IntegerField(IntegerVariableMixin, BaseField): + input_type = 'number' + + +class PasswordField(StringField): + input_type = 'password' + + +class FloatField(FloatVariableMixin, BaseField): + input_type = 'number' + + +class BooleanField(BooleanMixin, BaseField): + input_type = 'checkbox' + + +class DateField(DateMixin, BaseField): + input_type = 'date' + + def __init__( + self, + default_value=None, + required=True, + validators=[], + verbose_name=None, + auto_now=False, + auto_now_add=False, + **kwargs): + super( + DateField, + self).__init__( + default_value=default_value, + required=required, + validators=validators, + verbose_name=verbose_name, + **kwargs) + self.auto_now = auto_now + self.auto_now_add = auto_now_add + + +class DateTimeField(DateTimeMixin, BaseField): + input_type = 'datetime-local' + + def __init__( + self, + default_value=None, + required=True, + validators=[], + verbose_name=None, + auto_now=False, + auto_now_add=False, + **kwargs): + super( + DateTimeField, + self).__init__( + default_value=default_value, + required=required, + validators=validators, + verbose_name=verbose_name, + **kwargs) + self.auto_now = auto_now + self.auto_now_add = auto_now_add + + +class ChoiceField(BaseField): + template = 'formy/fields/select.html' + + +class MultipleChoiceField(BaseField): + template = 'formy/fields/select-multiple.html' + + +class TextAreaField(BaseField): + 
template = 'formy/fields/textarea.html' + + +class CKEditor(BaseField): + template = 'formy/fields/ckeditor.html' + static_assets = ( + '') \ No newline at end of file diff --git a/pfunk/forms/form.py b/pfunk/forms/form.py new file mode 100644 index 0000000..3bb1b61 --- /dev/null +++ b/pfunk/forms/form.py @@ -0,0 +1,43 @@ +from valley.declarative import DeclaredVars as DV, \ + DeclarativeVariablesMetaclass as DVM +from valley.schema import BaseSchema + +from pfunk.forms.fields import BaseField +from pfunk.utils.templates import temp_env + + +class DeclaredVars(DV): + base_field_class = BaseField + + +class DeclarativeVariablesMetaclass(DVM): + declared_vars_class = DeclaredVars + + +class BaseForm(BaseSchema): + """ + Base class for all Formy form classes. + """ + _template = 'formy/form/ul.html' + BUILTIN_DOC_ATTRS = [] + _create_error_dict = True + + def __iter__(self): + for k, field in self._base_properties.items(): + field.name = k + field.value = self._data.get(k) + yield field + + def render(self, include_submit=True): + return temp_env.get_template(self._template).render( + form=self, include_submit=include_submit) + + def render_static_assets(self): + static_assets = [] + for field in self: + static_assets.extend(field.static_assets) + return ''.join(set(static_assets)) + + +class Form(BaseForm, metaclass=DeclarativeVariablesMetaclass): + pass \ No newline at end of file diff --git a/pfunk/web/forms/templates/forms/ul.html b/pfunk/forms/templates/forms/ul.html similarity index 100% rename from pfunk/web/forms/templates/forms/ul.html rename to pfunk/forms/templates/forms/ul.html diff --git a/pfunk/project.py b/pfunk/project.py index de66fb0..0eca94f 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -244,6 +244,7 @@ def event_handler(self, event: dict, context: object) -> object: path = event.get('path') method = event.get('httpMethod') request_cls = RESTRequest + event.reverse = self.urls.build try: view, kwargs = self.urls.match(path, method) except NotFound: diff --git a/pfunk/testcase.py b/pfunk/testcase.py index dac644e..a07ef34 100644 --- a/pfunk/testcase.py +++ b/pfunk/testcase.py @@ -47,6 +47,7 @@ def setUp(self) -> None: coll.append(import_util(i)) else: coll.append(i) + self.project.add_resources(coll) self.project.publish() @@ -61,8 +62,13 @@ def setUp(self) -> None: Key = import_util('pfunk.contrib.auth.key.Key') keys = Key.create_keys() self.keys_path = 'pfunk/tests/unittest_keys.py' - with open(self.keys_path, 'w+') as f: - f.write(key_template.render(keys=keys)) + try: + with open(self.keys_path, 'w+') as f: + f.write(key_template.render(keys=keys)) + except (Exception, FileNotFoundError) as e: + print(e) + # Print the current working directory + print('unittest_keys.py not found in current working directory', os.getcwd()) def tearDown(self) -> None: super(APITestCase, self).tearDown() diff --git a/pfunk/tests/templates/house/create.html b/pfunk/tests/templates/house/create.html new file mode 100644 index 0000000..e930a89 --- /dev/null +++ b/pfunk/tests/templates/house/create.html @@ -0,0 +1,12 @@ + + + + + Title + + +
+ {{ form.render(include_submit=True) }} +
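The form rendered above is built from the collection itself rather than declared by hand. A rough sketch of how such a form class fits together, mirroring what FormMixin.build_form_class in pfunk/web/views/html.py does later in this patch; House and house are hypothetical stand-ins for an existing pfunk Collection and a fetched document, as in the test suite:

    from pfunk.forms.collections import CollectionForm

    class HouseForm(CollectionForm):
        class Meta:
            collection = House  # assumed: a pfunk Collection, e.g. the House model used in the tests

    # Passing _instance pre-fills the form from house.to_dict(flat=True)['data']
    form = HouseForm(_instance=house)
    html = form.render(include_submit=True)  # the string that {{ form.render(...) }} emits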
+ + \ No newline at end of file diff --git a/pfunk/tests/templates/house/delete.html b/pfunk/tests/templates/house/delete.html new file mode 100644 index 0000000..fa76b2c --- /dev/null +++ b/pfunk/tests/templates/house/delete.html @@ -0,0 +1,15 @@ + + + + + Delete {{object.address}} + + +

Delete {{object.address}}

+
+

Are you sure you want to delete {{object.address}}?

+ + +
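This confirmation page is only half of the delete flow. Per the reworked test_delete in pfunk/tests/test_web_crud.py further down in this patch, a GET renders the template and a POST performs the delete and redirects; roughly, with c, token and house standing in for the test client, auth token and House document set up in that test:

    res = c.get(f'/house/delete/{house.ref.id()}/', headers={"Authorization": token})
    assert res.status_code == 200              # confirmation page rendered from this template
    res = c.post(f'/house/delete/{house.ref.id()}/', headers={"Authorization": token})
    assert res.status_code == 302              # HTMLDeleteView.post() returns HttpRedirectResponse
    assert res.location == "/house/list/"      # get_success_url() for the House collection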
+ + \ No newline at end of file diff --git a/pfunk/tests/templates/house/detail.html b/pfunk/tests/templates/house/detail.html new file mode 100644 index 0000000..07d58a8 --- /dev/null +++ b/pfunk/tests/templates/house/detail.html @@ -0,0 +1,13 @@ + + + + + {{object.address}} + + +

{{object.address}}

+ +

{{object.user}}
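The two values shown here, object.address and object.user, are what the reworked test_read in pfunk/tests/test_web_crud.py asserts against once the JSON checks are replaced with checks on the rendered HTML; roughly, using the same hypothetical fixtures as above:

    res = c.get(f'/house/detail/{house.ref.id()}/', headers={"Authorization": token})
    assert "test address" in res.text          # {{object.address}} filled in via HTMLDetailView.get_context()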

+ + + \ No newline at end of file diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py index 5acd567..3704d30 100644 --- a/pfunk/tests/test_dev.py +++ b/pfunk/tests/test_dev.py @@ -1,52 +1,52 @@ -# test_dev.py - a placeholder test for fixing User - Group circular import errors - -import os -from valley.utils import import_util - -from pfunk.contrib.auth.collections import BaseUser, User -from pfunk.testcase import APITestCase -from pfunk.contrib.auth.collections import Group -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField - - -# Simple setup -# Env var setup for user and group +# # test_dev.py - a placeholder test for fixing User - Group circular import errors +# +# import os +# from valley.utils import import_util +# +# from pfunk.contrib.auth.collections import BaseUser, User +# from pfunk.testcase import APITestCase +# from pfunk.contrib.auth.collections import Group +# from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +# from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField +# +# +# # Simple setup +# # Env var setup for user and group # os.environ['GROUP_COLLECTION'] = 'pfunk.tests.test_dev.NewGroup' # os.environ['USER_COLLECTION'] = 'pfunk.tests.test_dev.NewUser' - -class NewUser(User): - # groups = ManyToManyField('pfunk.tests.test_dev.NewGroup') - pass - -class NewGroup(Group): - users = ManyToManyField('pfunk.tests.test_dev.NewUser') - -class Blog(Collection): - """ Collection for DigitalOcean-Type request """ - title = StringField(required=True) - content = StringField(required=True) - user = ReferenceField(NewUser) - - def __unicode__(self): - return self.title - -# Test case to see if user-group is working -class TestUserGroupError(APITestCase): - collections = [NewUser, NewGroup, Blog] - - def setUp(self) -> None: - super().setUp() - self.group = NewGroup.create(name='Power Users', slug='power-users') - self.user = NewUser.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) - self.blog = Blog.create( - title='test_blog', content='test content', user=self.user) - - self.token, self.exp = NewUser.api_login("test", "abc123") - print(f'\n\nTOKEN: {self.token}') - print(f'\n\nEXP: {self.exp}') - - def test_mock(self): - assert True \ No newline at end of file +# +# class NewUser(User): +# # groups = ManyToManyField('pfunk.tests.test_dev.NewGroup') +# pass +# +# class NewGroup(Group): +# users = ManyToManyField('pfunk.tests.test_dev.NewUser', 'group_users') +# +# class Blog(Collection): +# """ Collection for DigitalOcean-Type request """ +# title = StringField(required=True) +# content = StringField(required=True) +# user = ReferenceField(NewUser) +# +# def __unicode__(self): +# return self.title +# +# # Test case to see if user-group is working +# class TestUserGroupError(APITestCase): +# collections = [NewUser, NewGroup, Blog] +# +# def setUp(self) -> None: +# super().setUp() +# self.group = NewGroup.create(name='Power Users', slug='power-users') +# self.user = NewUser.create(username='test', email='tlasso@example.org', first_name='Ted', +# last_name='Lasso', _credentials='abc123', account_status='ACTIVE', +# groups=[self.group]) +# self.blog = Blog.create( +# 
title='test_blog', content='test content', user=self.user) +# +# self.token, self.exp = NewUser.api_login("test", "abc123") +# print(f'\n\nTOKEN: {self.token}') +# print(f'\n\nEXP: {self.exp}') +# +# def test_mock(self): +# assert True \ No newline at end of file diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py index 8a1f963..63143a3 100644 --- a/pfunk/tests/test_web_crud.py +++ b/pfunk/tests/test_web_crud.py @@ -17,6 +17,8 @@ def setUp(self) -> None: groups=[self.group]) self.token, self.exp = User.api_login("test", "abc123") self.house = House.create(address="test address", user=self.user) + self.house_b = House.create(address="test another address", user=self.user) + self.house_b = House.create(address="test even another address", user=self.user) self.app = self.project.wsgi_app self.c = Client(self.app) @@ -24,50 +26,64 @@ def test_read(self): res = self.c.get(f'/house/detail/{self.house.ref.id()}/', headers={ "Authorization": self.token}) - self.assertTrue(res.json['success']) - self.assertEqual("test address", res.json['data']['data']['address']) + + self.assertIn("test address", res.text) def test_read_all(self): res = self.c.get(f'/house/list/', headers={ "Authorization": self.token}) self.assertTrue(res.status_code, 200) - self.assertIn("test address", str(res.get_data())) - + self.assertIn("test address", str(res.text)) + self.assertIn("test another address", str(res.text)) + self.assertIn("test even another address", str(res.text)) def test_create(self): self.assertNotIn("the street somewhere", [ house.address for house in House.all()]) res = self.c.post('/house/create/', - json={ + data={ "address": "the street somewhere", "user": self.user.ref.id()}, headers={ "Authorization": self.token}) - self.assertTrue(res.json['success']) self.assertIn("the street somewhere", [ house.address for house in House.all()]) + self.assertEqual(res.status_code, 302) + self.assertEqual(res.location, "/house/list/") def test_update(self): self.assertNotIn("the updated street somewhere", [ house.address for house in House.all()]) - res = self.c.put(f'/house/update/{self.house.ref.id()}/', - json={ - "address": "the updated street somewhere", - "user": self.user.ref.id()}, - headers={ - "Authorization": self.token}) - - self.assertTrue(res.json['success']) + res = self.c.post(f'/house/update/{self.house.ref.id()}/', + data={ + "address": "the updated street somewhere", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) self.assertIn("the updated street somewhere", [ house.address for house in House.all()]) def test_delete(self): - res = self.c.delete(f'/house/delete/{self.house.ref.id()}/', + self.assertIn("test address", [ + house.address for house in House.all()]) + res = self.c.get(f'/house/delete/{self.house.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + self.assertEqual(res.status_code, 200) + self.assertIn("Delete test address", str(res.text)) + res = self.c.post(f'/house/delete/{self.house.ref.id()}/', headers={ "Authorization": self.token, "Content-Type": "application/json" }) + self.assertEqual(res.status_code, 302) + self.assertEqual(res.location, "/house/list/") + self.assertNotIn("the address", [ + house.address for house in House.all()]) + + - self.assertTrue(res.json['success']) diff --git a/pfunk/utils/routing.py b/pfunk/utils/routing.py new file mode 100644 index 0000000..2dc92b8 --- /dev/null +++ b/pfunk/utils/routing.py @@ -0,0 +1,48 @@ +import typing as t +import re + +_rule_re = 
re.compile( + r""" + (?P[^<]*) # static rule data + < + (?: + (?P[a-zA-Z_][a-zA-Z0-9_]*) # converter name + (?:\((?P.*?)\))? # converter arguments + \: # variable delimiter + )? + (?P[a-zA-Z_][a-zA-Z0-9_]*) # variable name + > + """, + re.VERBOSE, +) + + +def parse_rule(rule: str) -> t.Iterator[t.Tuple[t.Optional[str], t.Optional[str], str]]: + """Parse a rule and return it as generator. Each iteration yields tuples + in the form ``(converter, arguments, variable)``. If the converter is + `None` it's a static url part, otherwise it's a dynamic one. + :internal: + """ + pos = 0 + end = len(rule) + do_match = _rule_re.match + used_names = set() + while pos < end: + m = do_match(rule, pos) + if m is None: + break + data = m.groupdict() + if data["static"]: + yield None, None, data["static"] + variable = data["variable"] + converter = data["converter"] or "default" + if variable in used_names: + raise ValueError(f"variable name {variable!r} used twice.") + used_names.add(variable) + yield converter, data["args"] or None, variable + pos = m.end() + if pos < end: + remaining = rule[pos:] + if ">" in remaining or "<" in remaining: + raise ValueError(f"malformed url rule: {rule!r}") + yield None, None, remaining \ No newline at end of file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 9711904..0a151f3 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -1,10 +1,10 @@ +import json import os import re -import json import swaggyp as sw -from werkzeug.routing import Map, parse_rule from pfunk.collection import Collection +from pfunk.utils.routing import parse_rule GRAPHQL_TO_YAML_TYPES = { "String": "string", @@ -84,7 +84,7 @@ def write_to_yaml(self): there is already one, it will print the yaml file instead. """ if not os.path.exists(f'pfunk.json'): - raise Exception('Missing JSON Config file.') + raise Exception('Missing JSON Config file.') else: with open(f'pfunk.json', 'r') as f: data = json.loads(f.read()) @@ -239,4 +239,4 @@ def generate_swagger(self): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml() \ No newline at end of file + return self.write_to_yaml() diff --git a/pfunk/utils/templates.py b/pfunk/utils/templates.py index 18c3d7c..f27ae1b 100644 --- a/pfunk/utils/templates.py +++ b/pfunk/utils/templates.py @@ -11,6 +11,7 @@ def get_loaders(): loaders = [ FileSystemLoader(env('TEMPLATE_ROOT_DIR')), PackageLoader('pfunk.contrib.auth'), + PackageLoader('pfunk.forms'), ] for i in env('TEMPLATE_PACKAGES', [], var_type='list'): loaders.append(PackageLoader(i)) diff --git a/pfunk/web/request.py b/pfunk/web/request.py index d794c88..5540c7d 100644 --- a/pfunk/web/request.py +++ b/pfunk/web/request.py @@ -51,6 +51,7 @@ def __init__(self, event, kwargs=None): super(BaseAPIGatewayRequest, self).__init__(event, kwargs) self.is_base64_encoded = event.get('isBase64Encoded') self.body = event.get('body') + self.form_data = event.get('body') self.headers = event.get('headers') or dict() self.query_params = event.get('queryStringParameters') or dict() @@ -64,11 +65,18 @@ def __init__(self, event, kwargs=None): super(WSGIRequest, self).__init__(event, kwargs=kwargs) self.method = event.method self.query_params = event.args + self.form_data = self.build_form_data() self.body = event.data self.headers = event.headers self.path = event.path self.cookies = event.cookies self.source_ip = event.remote_addr + self.reverse = event.reverse + + def build_form_data(self): + """ Builds the form data """ + if self.raw_event.form: + 
return {k: v for k, v in self.raw_event.form.items()} class RESTRequest(BaseAPIGatewayRequest): diff --git a/pfunk/web/response.py b/pfunk/web/response.py index b81e471..441e196 100644 --- a/pfunk/web/response.py +++ b/pfunk/web/response.py @@ -42,6 +42,12 @@ class NotFoundResponseMixin(object): success: bool = False +class RedirectResponseMixin(object): + status_code = 302 + default_payload = 'Redirect' + success: bool = False + + class BadRequestResponseMixin(object): status_code = 400 default_payload = 'Bad Request' @@ -123,3 +129,9 @@ class HttpBadRequestResponse(BadRequestResponseMixin, Response): class JSONBadRequestResponse(BadRequestResponseMixin, JSONResponse): pass + + +class HttpRedirectResponse(RedirectResponseMixin, Response): + def __init__(self, location, payload=None, headers={}, *args, **kwargs): + super(HttpRedirectResponse, self).__init__(payload, headers, *args, **kwargs) + self.raw_headers['Location'] = location diff --git a/pfunk/web/views/base.py b/pfunk/web/views/base.py index 00f43ff..66248d9 100644 --- a/pfunk/web/views/base.py +++ b/pfunk/web/views/base.py @@ -8,7 +8,6 @@ from werkzeug.routing import Rule from pfunk.exceptions import TokenValidationFailed, LoginFailed, Unauthorized, DocNotFound, GraphQLError, NotUniqueError -from pfunk.web.forms.collections import CollectionForm from pfunk.web.request import Request, RESTRequest, HTTPRequest from pfunk.web.response import (Response, HttpNotFoundResponse, HttpForbiddenResponse, HttpBadRequestResponse, HttpMethodNotAllowedResponse, HttpUnauthorizedResponse) @@ -323,7 +322,7 @@ def get_query(self): return self.collection.all(**self.get_query_kwargs()) def get_query_kwargs(self): - """ Acquires the addutional generic kwargs in a query + """ Acquires the additional generic kwargs in a query This includes the keys that are generic to queries. 
['after, 'before', 'page_size'] @@ -354,27 +353,13 @@ class UpdateMixin(object): """ Generic PUT mixin for a fauna object """ form_class = None - def get_form_class(self): - """ Acquires or builds the form class to use for updating the object """ - if self.form_class: - return self.form_class - return self.build_form_class() - - def build_form_class(self): - """ Builds the form class to use for updating the object """ - - class Meta: - collection = self.collection - - form_class = type(f"{self.get_collection_name()}Form", (CollectionForm,), { - # constructor - - "Meta": Meta, - }) + def get_data(self): + """ Acquires the data from the request body """ + return self.request.get_json() def get_query_kwargs(self): - data = self.request.get_json() + data = self.get_data() fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ReferenceField') for k, v in fields.items(): current_value = data.get(k) @@ -384,7 +369,6 @@ def get_query_kwargs(self): if current_value: obj = col.get(current_value) data[k] = obj - return data diff --git a/pfunk/web/views/html.py b/pfunk/web/views/html.py index a9ba9d0..dc66458 100644 --- a/pfunk/web/views/html.py +++ b/pfunk/web/views/html.py @@ -1,11 +1,12 @@ from pfunk.client import q +from pfunk.forms.collections import CollectionForm from pfunk.utils.templates import temp_env from pfunk.web.response import Response, HttpNotFoundResponse, HttpBadRequestResponse, HttpMethodNotAllowedResponse, \ - HttpUnauthorizedResponse, HttpForbiddenResponse -from pfunk.web.views.base import UpdateMixin, ActionMixin, IDMixin, ObjectMixin, QuerysetMixin, RESTView + HttpUnauthorizedResponse, HttpForbiddenResponse, HttpRedirectResponse +from pfunk.web.views.base import UpdateMixin, ActionMixin, IDMixin, ObjectMixin, QuerysetMixin, HTTPView -class HTMLView(RESTView): +class HTMLView(HTTPView): """ Base class for all HTML views """ @@ -34,18 +35,83 @@ def get_response(self): ) -class HTMLCreateView(UpdateMixin, ActionMixin, HTMLView): - """ - Define a `Create` view that allows `creation` of an entity in the collection - """ - action = 'create' - http_methods = ['post'] - login_required = True +class FormMixin(UpdateMixin): + success_url = '/{collection}/{action}/' - def get_query(self): - """ Entity created in a collection """ - obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) - return obj + def get_form(self, form_class=None): + """ Acquires the form for the request """ + if form_class is None: + form_class = self.get_form_class() + return form_class(**self.get_form_kwargs()) + + def get_data(self): + return self.request.form_data + + def get_object(self): + """ Acquires the object for the request """ + return self.collection.get(self.request.kwargs.get('id')) + + def get_context(self): + context = super(UpdateMixin, self).get_context() + context['form'] = self.get_form() + return context + + def get_form_class(self): + """ Acquires or builds the form class to use for updating the object """ + if self.form_class: + return self.form_class + return self.build_form_class() + + def build_form_class(self): + """ Builds the form class to use for updating the object """ + + class Meta: + collection = self.collection + + form_class = type(f"{self.collection.get_collection_name()}Form", (CollectionForm,), { + # constructor + + "Meta": Meta, + }) + return form_class + + def get_form_kwargs(self): + """ Acquires the kwargs for the form """ + data = self.request.form_data + if self.action == 'update': + if not data: + data = dict() + data['_instance'] 
= self.get_object() + return data + + def form_valid(self, form): + """ Called when the form is valid """ + q = self.get_query() + return HttpRedirectResponse( + location=self.get_success_url(), + ) + + def get_success_url(self): + """ Acquires the success url for the form """ + return self.success_url.format( + collection=self.collection.get_collection_name().lower(), + action='list') + + def form_invalid(self, form): + """ Called when the form is invalid """ + print(self.action, "Form Invalid: Got Here") + return self.error_response(form._errors) + + def get_response(self, form=None): + if self.request.method == 'POST': + form = self.get_form() + form.validate() + if form._is_valid: + return self.form_valid(form) + return self.response_class( + payload=self.get_template().render(**self.get_context()), + headers=self.get_headers() + ) def get_m2m_kwargs(self, obj): """ Acquires the keyword-arguments for the many-to-many relationship @@ -59,7 +125,7 @@ def get_m2m_kwargs(self, obj): obj (dict, required): """ - data = self.request.get_json() + data = self.get_data() fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') for k, v in fields.items(): current_value = data.get(k) @@ -72,45 +138,38 @@ def get_m2m_kwargs(self, obj): ) -class HTMLUpdateView(UpdateMixin, IDMixin, HTMLView): +class HTMLCreateView(FormMixin, ActionMixin, HTMLView): """ - Define a view to allow `Update` operations + Define a `Create` view that allows `creation` of an entity in the collection """ - action = 'update' - http_methods = ['put'] + action = 'create' + http_methods = ['post'] login_required = True def get_query(self): - """ Entity updated in a collection """ - obj = self.collection.update(**self.get_query_kwargs(), _token=self.request.token) + """ Entity created in a collection """ + obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) return obj - def get_m2m_kwargs(self, obj): - """ Acquires the keyword-arguments for the many-to-many relationship - FaunaDB is only able to create a many-to-many relationship - by creating a collection that references both of the object. - So, when creating an entity, it is needed to create an entity to - make them related to each other. 
- - Args: - obj (dict, required): +class HTMLUpdateView(FormMixin, IDMixin, HTMLView): + """ + Define a view to allow `Update` operations + """ + action = 'update' + http_methods = ['post'] + login_required = True - """ - data = self.request.get_json() - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') - for k, v in fields.items(): - current_value = data.get(k) - col = v.get('foreign_class')() - client = col().client() - client.query( - q.create( - data={ - '_class': col.get_class_name(), - '_ref': obj['_ref'] - } - ) - ) + def get_query(self): + obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) + kwargs = self.get_query_kwargs() + try: + kwargs.pop('_instance') + except KeyError: + pass + obj._data.update(kwargs) + obj.save() + return obj class HTMLDetailView(ObjectMixin, IDMixin, HTMLView): @@ -129,19 +188,37 @@ def get_context(self): class HTMLDeleteView(ObjectMixin, IDMixin, HTMLView): """ Define a view to allow `Delete` entity operations """ action = 'delete' - http_methods = ['delete'] + http_methods = ['get', 'post'] login_required = True + success_url = '/{collection}/{action}/' def get_query(self): """ Deleted an entity in the specified collection """ return self.collection.delete_from_id(self.request.kwargs.get('id'), _token=self.request.token) + def get_object(self): + """ Acquires the object for the request """ + return self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) + def get_context(self): """ Context for the view """ context = super(HTMLDeleteView, self).get_context() - context['object'] = self.get_query() + context['object'] = self.get_object() return context + def get_success_url(self): + """ Acquires the success url for the form """ + return self.success_url.format( + collection=self.collection.get_collection_name().lower(), + action='list') + + def post(self, **kwargs): + self.get_query() + return HttpRedirectResponse( + location=self.get_success_url(), + ) + + class HTMLListView(QuerysetMixin, ActionMixin, HTMLView): """ Define a view to allow `All/List` entity operations """ restrict_content_type = False @@ -152,4 +229,4 @@ def get_context(self): """ Context for the view """ context = super(HTMLListView, self).get_context() context['object_list'] = self.get_query() - return context \ No newline at end of file + return context diff --git a/poetry.lock b/poetry.lock index 333249c..d898edf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,20 @@ +[[package]] +name = "anyio" +version = "3.6.1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + [[package]] name = "appnope" version = "0.1.3" @@ -39,7 +56,7 @@ tests = ["pytest"] [[package]] name = "asttokens" -version = "2.0.5" +version = "2.0.8" description = "Annotate AST trees with source code positions" category = "dev" optional = false @@ -49,7 +66,7 @@ python-versions = "*" six = "*" [package.extras] -test = ["astroid", "pytest"] +test = ["astroid (<=2.5.3)", "pytest"] [[package]] name = "astunparse" @@ -64,17 +81,17 @@ six = ">=1.6.1,<2.0" 
[[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "backcall" @@ -114,27 +131,27 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.23.8" +version = "1.24.92" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.26.8,<1.27.0" +botocore = ">=1.27.92,<1.28.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.5.0,<0.6.0" +s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.26.8" +version = "1.27.92" description = "Low-level, data-driven core of boto 3." category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -142,7 +159,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.8)"] +crt = ["awscrt (==0.14.0)"] [[package]] name = "cachetools" @@ -154,7 +171,7 @@ python-versions = "~=3.5" [[package]] name = "certifi" -version = "2022.5.18.1" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -162,7 +179,7 @@ python-versions = ">=3.6" [[package]] name = "cffi" -version = "1.15.0" +version = "1.15.1" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -173,11 +190,11 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] @@ -195,7 +212,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.5" description = "Cross-platform colored terminal text." 
category = "main" optional = false @@ -233,7 +250,7 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "debugpy" -version = "1.6.0" +version = "1.6.3" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false @@ -276,15 +293,18 @@ cli = ["terminaltables[cli] (>=3.1.10,<4.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", [[package]] name = "executing" -version = "0.8.3" +version = "1.1.1" description = "Get the currently executing AST node of a frame, and other information" category = "dev" optional = false python-versions = "*" +[package.extras] +tests = ["rich", "littleutils", "pytest", "asttokens"] + [[package]] name = "fastjsonschema" -version = "2.15.3" +version = "2.16.2" description = "Fastest Python implementation of JSON schema" category = "dev" optional = false @@ -295,7 +315,7 @@ devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benc [[package]] name = "faunadb" -version = "4.2.0" +version = "4.3.1" description = "FaunaDB Python driver" category = "main" optional = false @@ -387,15 +407,31 @@ python-versions = "*" [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "importlib-metadata" +version = "5.0.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] + [[package]] name = "importlib-resources" -version = "5.7.1" +version = "5.10.0" description = "Read resources from Python packages" category = "dev" optional = false @@ -405,12 +441,12 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [[package]] name = "ipykernel" -version = "6.13.0" +version = "6.16.0" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -425,15 +461,16 @@ matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" +pyzmq = ">=17" tornado = ">=6.1" traitlets = ">=5.1.0" [package.extras] -test = ["pytest (>=6.0)", "pytest-cov", "flaky", "ipyparallel", "pre-commit", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest-cov", "pytest-timeout", "pytest (>=6.0)"] [[package]] name = "ipython" -version = "8.3.0" +version = "8.5.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -448,7 +485,7 @@ jedi = ">=0.16" matplotlib-inline = "*" 
pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +prompt-toolkit = ">3.0.1,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" @@ -476,27 +513,25 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "7.7.0" -description = "IPython HTML widgets for Jupyter" +version = "8.0.2" +description = "Jupyter interactive widgets" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] ipykernel = ">=4.5.1" -ipython = {version = ">=4.0.0", markers = "python_version >= \"3.3\""} -ipython-genutils = ">=0.2.0,<0.3.0" -jupyterlab-widgets = {version = ">=1.0.0", markers = "python_version >= \"3.6\""} -nbformat = ">=4.2.0" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0,<4.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=3.6.0,<3.7.0" +widgetsnbextension = ">=4.0,<5.0" [package.extras] -test = ["pytest (>=3.6.0)", "pytest-cov", "mock"] +test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] [[package]] name = "iso8601" -version = "1.0.2" +version = "1.1.0" description = "Simple module to parse ISO 8601 dates" category = "main" optional = false @@ -533,7 +568,7 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jmespath" -version = "1.0.0" +version = "1.0.1" description = "JSON Matching Expressions" category = "main" optional = false @@ -541,7 +576,7 @@ python-versions = ">=3.7" [[package]] name = "jsonschema" -version = "4.5.1" +version = "4.16.0" description = "An implementation of JSON Schema validation for Python" category = "dev" optional = false @@ -550,11 +585,12 @@ python-versions = ">=3.7" [package.dependencies] attrs = ">=17.4.0" importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] name = "jupyter" @@ -574,7 +610,7 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "7.3.1" +version = "7.4.2" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false @@ -585,21 +621,21 @@ entrypoints = "*" jupyter-core = ">=4.9.2" nest-asyncio = ">=1.5.4" python-dateutil = ">=2.8.2" -pyzmq = ">=22.3" -tornado = ">=6.0" +pyzmq = ">=23.0" +tornado = ">=6.2" traitlets = "*" [package.extras] -doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +doc = ["ipykernel", "myst-parser", "sphinx-rtd-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt"] test = ["codecov", "coverage", "ipykernel (>=6.5)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-console" -version = "6.4.3" +version = "6.4.4" description = "Jupyter terminal console" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] ipykernel = "*" @@ -613,7 +649,7 @@ 
test = ["pexpect"] [[package]] name = "jupyter-core" -version = "4.10.0" +version = "4.11.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false @@ -626,6 +662,35 @@ traitlets = "*" [package.extras] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "jupyter-server" +version = "1.21.0" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +anyio = ">=3.1.0,<4" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.7.0" +nbconvert = ">=6.4.4" +nbformat = ">=5.2.0" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=17" +Send2Trash = "*" +terminado = ">=0.8.3" +tornado = ">=6.1.0" +traitlets = ">=5.1" +websocket-client = "*" + +[package.extras] +test = ["coverage", "ipykernel", "pre-commit", "pytest-console-scripts", "pytest-cov", "pytest-mock", "pytest-timeout", "pytest-tornasync", "pytest (>=7.0)", "requests"] + [[package]] name = "jupyterlab-pygments" version = "0.2.2" @@ -636,11 +701,11 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "1.1.0" -description = "A JupyterLab extension." +version = "3.0.3" +description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "markupsafe" @@ -652,7 +717,7 @@ python-versions = ">=3.7" [[package]] name = "matplotlib-inline" -version = "0.1.3" +version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" category = "dev" optional = false @@ -663,15 +728,47 @@ traitlets = "*" [[package]] name = "mistune" -version = "0.8.4" -description = "The fastest markdown parser in pure Python" +version = "2.0.4" +description = "A sane Markdown parser with useful plugins and renderers" category = "dev" optional = false python-versions = "*" +[[package]] +name = "nbclassic" +version = "0.4.5" +description = "A web-based notebook environment for interactive computing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=6.1.1" +jupyter-core = ">=4.6.1" +jupyter-server = ">=1.8" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +notebook-shim = ">=0.1.0" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +json-logging = ["json-logging"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "pytest-tornasync", "requests-unixsocket"] + [[package]] name = "nbclient" -version = "0.6.3" +version = "0.7.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
category = "dev" optional = false @@ -681,15 +778,15 @@ python-versions = ">=3.7.0" jupyter-client = ">=6.1.5" nbformat = ">=5.0" nest-asyncio = "*" -traitlets = ">=5.0.0" +traitlets = ">=5.2.2" [package.extras] -test = ["xmltodict", "twine (>=1.11.0)", "testpath", "setuptools (>=60.0)", "pytest-cov (>=2.6.1)", "pytest-asyncio", "pytest (>=4.1)", "pre-commit", "pip (>=18.1)", "mypy", "ipywidgets (<8.0.0)", "ipython (<8.0.0)", "ipykernel", "flake8", "check-manifest", "black"] +test = ["xmltodict", "twine (>=1.11.0)", "testpath", "setuptools (>=60.0)", "pytest-cov (>=2.6.1)", "pytest-asyncio", "pytest (>=4.1)", "pre-commit", "pip (>=18.1)", "nbconvert", "mypy", "ipywidgets", "ipython", "ipykernel", "flake8", "check-manifest", "black"] sphinx = ["sphinx-book-theme", "Sphinx (>=1.7)", "myst-parser", "moto", "mock", "autodoc-traits"] [[package]] name = "nbconvert" -version = "6.5.0" +version = "7.2.1" description = "Converting Jupyter Notebooks" category = "dev" optional = false @@ -699,12 +796,12 @@ python-versions = ">=3.7" beautifulsoup4 = "*" bleach = "*" defusedxml = "*" -entrypoints = ">=0.2.2" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" -MarkupSafe = ">=2.0" -mistune = ">=0.8.1,<2" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<3" nbclient = ">=0.5.0" nbformat = ">=5.1" packaging = "*" @@ -714,15 +811,17 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "tornado (>=6.1)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] -docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["ipykernel", "ipython", "ipywidgets (>=7)", "myst-parser", "nbsphinx (>=0.2.12)", "pre-commit", "pyppeteer (>=1,<1.1)", "pyqtwebengine (>=5.15)", "pytest", "pytest-cov", "pytest-dependency", "sphinx-rtd-theme", "sphinx (==5.0.2)", "tornado (>=6.1)"] +docs = ["ipython", "myst-parser", "nbsphinx (>=0.2.12)", "sphinx-rtd-theme", "sphinx (==5.0.2)"] +qtpdf = ["pyqtwebengine (>=5.15)"] +qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-cov", "pytest-dependency"] webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.4.0" +version = "5.7.0" description = "The Jupyter Notebook format" category = "dev" optional = false @@ -735,11 +834,11 @@ jupyter-core = "*" traitlets = ">=5.1" [package.extras] -test = ["check-manifest", "testpath", "pytest", "pre-commit"] +test = ["check-manifest", "pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.5" +version = "1.5.6" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false @@ -747,7 +846,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.4.11" +version = "6.5.1" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -760,6 +859,7 @@ ipython-genutils = "*" jinja2 = "*" jupyter-client = ">=5.3.4" jupyter-core = ">=4.6.1" +nbclassic = "0.4.5" nbconvert = ">=5" nbformat = "*" nest-asyncio = ">=1.5" @@ -773,7 +873,21 @@ traitlets = ">=4.2.1" [package.extras] docs = ["sphinx", "nbsphinx", 
"sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "requests-unixsocket"] + +[[package]] +name = "notebook-shim" +version = "0.2.0" +description = "A shim layer for notebook traits and config" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest-tornasync", "pytest-console-scripts", "pytest"] [[package]] name = "packaging" @@ -842,6 +956,14 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "ply" version = "3.11" @@ -852,7 +974,7 @@ python-versions = "*" [[package]] name = "prometheus-client" -version = "0.14.1" +version = "0.15.0" description = "Python client for the Prometheus monitoring system." category = "dev" optional = false @@ -863,7 +985,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.29" +version = "3.0.31" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false @@ -874,7 +996,7 @@ wcwidth = "*" [[package]] name = "psutil" -version = "5.9.1" +version = "5.9.2" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false @@ -920,24 +1042,27 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyjwt" -version = "2.4.0" +version = "2.5.0" description = "JSON Web Token implementation in Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -crypto = ["cryptography (>=3.3.1)"] -dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +crypto = ["cryptography (>=3.3.1)", "types-cryptography (>=3.3.21)"] +dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "types-cryptography (>=3.3.21)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] @@ -988,7 +1113,7 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "2.0.5" +version = "2.0.8" description = "Pseudo terminal support for Windows from Python." 
category = "dev" optional = false @@ -1004,7 +1129,7 @@ python-versions = ">=3.6" [[package]] name = "pyzmq" -version = "23.0.0" +version = "24.0.1" description = "Python bindings for 0MQ" category = "dev" optional = false @@ -1016,7 +1141,7 @@ py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.3.0" +version = "5.3.2" description = "Jupyter Qt console" category = "dev" optional = false @@ -1030,7 +1155,7 @@ jupyter-core = "*" pygments = "*" pyzmq = ">=17.1" qtpy = ">=2.0.1" -traitlets = "*" +traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" [package.extras] doc = ["Sphinx (>=1.3)"] @@ -1038,7 +1163,7 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "2.1.0" +version = "2.2.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" optional = false @@ -1048,33 +1173,33 @@ python-versions = ">=3.7" packaging = "*" [package.extras] -test = ["pytest-qt", "pytest-cov (>=3.0.0)", "pytest (>=6,!=7.0.0,!=7.0.1)"] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "requests" -version = "2.27.1" +version = "2.28.1" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "s3transfer" -version = "0.5.2" +version = "0.6.0" description = "An Amazon S3 Transfer Manager" category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -1104,9 +1229,9 @@ optional = false python-versions = "*" [package.extras] -nativelib = ["pyobjc-framework-cocoa", "pywin32"] -objc = ["pyobjc-framework-cocoa"] win32 = ["pywin32"] +objc = ["pyobjc-framework-cocoa"] +nativelib = ["pywin32", "pyobjc-framework-cocoa"] [[package]] name = "six" @@ -1116,6 +1241,14 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "soupsieve" version = "2.3.2.post1" @@ -1126,7 +1259,7 @@ python-versions = ">=3.6" [[package]] name = "stack-data" -version = "0.2.0" +version = "0.5.1" description = "Extract data from python stack frames and tracebacks for informative displays" category = "dev" optional = false @@ -1165,7 +1298,7 @@ valley = ">=1.5.6,<2.0.0" [[package]] name = "terminado" -version = "0.15.0" +version = "0.16.0" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
category = "dev" optional = false @@ -1181,49 +1314,50 @@ test = ["pre-commit", "pytest-timeout", "pytest (>=6.0)"] [[package]] name = "tinycss2" -version = "1.1.1" +version = "1.2.1" description = "A tiny CSS parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] webencodings = ">=0.4" [package.extras] -test = ["coverage", "pytest-isort", "pytest-flake8", "pytest-cov", "pytest"] +test = ["flake8", "isort", "pytest"] doc = ["sphinx-rtd-theme", "sphinx"] [[package]] name = "tornado" -version = "6.1" +version = "6.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." category = "dev" optional = false -python-versions = ">= 3.5" +python-versions = ">= 3.7" [[package]] name = "traitlets" -version = "5.2.1.post0" +version = "5.5.0" description = "" category = "dev" optional = false python-versions = ">=3.7" [package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["pre-commit", "pytest"] [[package]] name = "urllib3" -version = "1.26.9" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1253,46 +1387,60 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "websocket-client" +version = "1.4.1" +description = "WebSocket client for Python with low level API options" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["websockets"] +optional = ["wsaccel", "python-socks"] +docs = ["sphinx-rtd-theme (>=0.5)", "Sphinx (>=3.4)"] + [[package]] name = "werkzeug" -version = "2.1.2" +version = "2.2.2" description = "The comprehensive WSGI web application library." 
category = "main" optional = false python-versions = ">=3.7" +[package.dependencies] +MarkupSafe = ">=2.1.1" + [package.extras] watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "3.6.0" -description = "IPython HTML widgets for Jupyter" +version = "4.0.3" +description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -notebook = ">=4.4.1" +python-versions = ">=3.7" [[package]] name = "zipp" -version = "3.8.0" +version = "3.9.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "jaraco.functools", "more-itertools", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "324e8aabc4bf0157d9126fe65c85ec227eba1c16ced513b206ca6f02129494d9" +content-hash = "6808ad24f73c3549501769eb7c5dca32503dc360738549cc7fdfc63847f9a38c" [metadata.files] +anyio = [] appnope = [] argon2-cffi = [] argon2-cffi-bindings = [] @@ -1328,6 +1476,7 @@ hpack = [] hyper = [] hyperframe = [] idna = [] +importlib-metadata = [] importlib-resources = [] ipykernel = [] ipython = [] @@ -1342,22 +1491,26 @@ jupyter = [] jupyter-client = [] jupyter-console = [] jupyter-core = [] +jupyter-server = [] jupyterlab-pygments = [] jupyterlab-widgets = [] markupsafe = [] matplotlib-inline = [] mistune = [] +nbclassic = [] nbclient = [] nbconvert = [] nbformat = [] nest-asyncio = [] notebook = [] +notebook-shim = [] packaging = [] pandocfilters = [] parso = [] pdoc = [] pexpect = [] pickleshare = [] +pkgutil-resolve-name = [] ply = [] prometheus-client = [] prompt-toolkit = [] @@ -1383,6 +1536,7 @@ s3transfer = [] sammy = [] send2trash = [] six = [] +sniffio = [] soupsieve = [] stack-data = [] stripe = [] @@ -1395,6 +1549,7 @@ urllib3 = [] valley = [] wcwidth = [] webencodings = [] +websocket-client = [] werkzeug = [] widgetsnbextension = [] zipp = [] diff --git a/pyproject.toml b/pyproject.toml index 7edd45a..dee44b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,6 @@ license = "Apache-2.0" python = "^3.8" faunadb = "^4.0.1" valley = "1.5.8" -envs = "^1.3" requests = "^2.23.0" pytz = "^2021.1" decorator = "^5.0.9" @@ -27,6 +26,7 @@ bleach = "^4.1.0" swaggyp = "^0.2.0" formy = "1.3.1" Jinja2 = "^3.1.2" +envs = "^1.4" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 7a9f0fa64cd7da659428e70b256bb375778dcd5d Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 22 Nov 2022 16:27:19 +0800 Subject: [PATCH 104/214] Fixed GenericGroupBasedPerms not properly acquiring user table in membership role --- pfunk/contrib/auth/collections.py | 3 +- pfunk/contrib/auth/resources.py | 12 +++--- pfunk/tests/test_group_subclass.py | 59 +++++++++++++----------------- pfunk/tests/test_user_subclass.py | 9 +---- 4 files changed, 35 insertions(+), 48 deletions(-) diff --git a/pfunk/contrib/auth/collections.py 
b/pfunk/contrib/auth/collections.py index ba93f10..2d64d5f 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -328,9 +328,10 @@ def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ if not self.group_class: raise NotImplementedError + group_class_field = self.get_group_field() user_class = self.__class__.__name__.lower() group_class = self.group_class.__name__.lower() - relation_name = self._base_properties.get("groups").relation_name + relation_name = self._base_properties.get(group_class_field).relation_name index_name = f'{user_class}s_{group_class}s_by_{user_class}' if relation_name: index_name = f'{relation_name}_by_{user_class}' diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 6240904..3888371 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -8,6 +8,7 @@ USER_CLASS = env('USER_COLLECTION', 'User') GROUP_CLASS = env('GROUP_COLLECTION', 'Group') + class AuthFunction(Function): def get_role(self): @@ -340,6 +341,9 @@ class GenericGroupBasedRole(GenericAuthorizationRole): def get_name_suffix(self): return f'{self.group_table.lower()}_based_crud_role' + + def get_user_table(self): + return USER_CLASS def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -349,9 +353,7 @@ def get_lambda(self, resource_type): 2. You have the create permission to perform the action (create, read, write, and delete) """ current_group_field = self.collection.get_group_field().lower() - # group_slug = self.collection. - # TODO: perm won't match with the entity that is being queried - perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() + perm = f'{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, @@ -373,7 +375,7 @@ def get_lambda(self, resource_type): q.current_identity(), group_ref ) - ))))), + ))))), perm ), q.equals( @@ -420,7 +422,7 @@ def get_lambda(self, resource_type): # """ Generic set of permissions for many-to-many entity to user relationship """ # def get_name_suffix(self): -# # TODO: return suffix: +# # TODO: return suffix: # return f'{self.get_group_table().lower()}_based_crud_role' # def get_relation_index_name(self): diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_group_subclass.py index a0792cd..d179861 100644 --- a/pfunk/tests/test_group_subclass.py +++ b/pfunk/tests/test_group_subclass.py @@ -34,35 +34,28 @@ def __unicode__(self): # Test case to see if user-group is working -class TestUserGroupError(APITestCase): +class TestCustomGroupBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') - self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + self.user = Newuser.create(username='test_user', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) - print(f'\n\nALL INDEXES: {self.project.indexes}\n\n') perms = self.user.add_permissions( self.group, ['create', 'read', 'write', 'delete']) - - p(f'\n\nest setup: Added User permissions: {perms}\n\n') - p(f'@test setup: User permissions: {self.user.permissions()}') - p(f'@Test Setup: User Created: {self.user.__dict__}') + self.token, self.exp = Newuser.api_login("test_user", "abc123") + 
self.raw_token = Newuser.login("test_user", "abc123") self.blog = Blog.create( title='test_blog', content='test content', group=self.group) - self.token, self.exp = Newuser.api_login("test", "abc123") - # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') - # p(f'@Test Setup: User Created: {self.user.__dict__}') - - def test_read(self): - res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') - self.assertTrue(res.status_code, 200) - self.assertEqual("test_blog", res.json['data']['data']['title']) + + # def test_read(self): + # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + # self.assertEqual("test_blog", res.json['data']['data']['title']) # def test_read_all(self): # res = self.c.get(f'/json/blog/list/', @@ -70,20 +63,19 @@ def test_read(self): # "Authorization": self.token}) # self.assertTrue(res.status_code, 200) - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "user": self.user.ref.id()}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog."}, + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) # def test_update(self): # self.assertNotIn("the updated street somewhere", [ @@ -91,8 +83,7 @@ def test_read(self): # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', # json={ # "title": "updated blog", - # "content": "I updated my blog.", - # "user": self.user.ref.id()}, + # "content": "I updated my blog."}, # headers={ # "Authorization": self.token}) diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_user_subclass.py index ae37c1f..46eb5ad 100644 --- a/pfunk/tests/test_user_subclass.py +++ b/pfunk/tests/test_user_subclass.py @@ -4,20 +4,13 @@ from valley.utils import import_util from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups from pfunk.testcase import APITestCase from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole -class UserGroups(Collection): - collection_name = 'users_groups' - userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') - groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') - permissions = ListField() - - class Newgroup(BaseGroup): users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', relation_name='custom_users_groups') From 9ace3cd9a3f698bc92432a9bc251c169e7cffee4 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 24 Nov 2022 16:50:44 +0800 Subject: [PATCH 105/214] refactored auth 
perms for cleaner usage. Refactored unittests for clearer name. --- pfunk/contrib/auth/resources.py | 55 ++++-------- ... => test_custom_user_group_group_perms.py} | 84 ++++++++++--------- ... => test_custom_user_group_users_perms.py} | 18 ++-- 3 files changed, 70 insertions(+), 87 deletions(-) rename pfunk/tests/{test_group_subclass.py => test_custom_user_group_group_perms.py} (51%) rename pfunk/tests/{test_user_subclass.py => test_custom_user_group_users_perms.py} (81%) diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 3888371..7cf9252 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -182,45 +182,15 @@ def get_lambda(self, resource_type): class GenericAuthorizationRole(Role): - def get_user_collection(self): - """ Acquires User collection type """ - user_field = self.collection.get_user_field() - if user_field: - user_field = user_field.lower() - col = self.collection._base_properties.get(user_field) - if col: - return col.get_foreign_class() - else: - return None - - def get_group_collection(self): - """ Acquires Group collection type from User's fields """ - user_col = self.get_user_collection() - col = user_col() - group_field = col.get_group_field() - user_groups = user_col._base_properties.get(group_field) - if user_groups: - return user_groups.get_foreign_class() - else: - return None + def get_relation_index_name(self): + """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ + return 'usergroups_by_userID_and_groupID' def get_user_table(self): - """ Acquires User's class name """ - col = self.get_user_collection() - if col: - return col.__name__ - return None + return USER_CLASS def get_group_table(self): - """ Acquires group class name from the user's fields """ - group_col = self.get_group_collection() - if group_col: - return group_col.__name__ - return None - - def get_relation_index_name(self): - """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ - return 'usergroups_by_userID_and_groupID' + return GROUP_CLASS def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' @@ -282,7 +252,13 @@ def get_relation_index_name(self): Formatted as: {user_group_relation_name}_by_{user_class} """ # Acquires the `groups` field from the user collection - user_col = self.get_user_collection() + user_field = self.collection.get_user_field() + if user_field: + user_field = user_field.lower() + else: + return None + user_col = self.collection._base_properties.get(user_field) + user_col = user_col.get_foreign_class() user_groups = user_col._base_properties.get("groups") if user_groups: @@ -342,9 +318,6 @@ class GenericGroupBasedRole(GenericAuthorizationRole): def get_name_suffix(self): return f'{self.group_table.lower()}_based_crud_role' - def get_user_table(self): - return USER_CLASS - def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -388,13 +361,13 @@ def get_lambda(self, resource_type): ) ) elif resource_type == 'create': + lambda_args = ["new_object"] group_ref = q.select(current_group_field, q.select('data', q.var('new_object'))) - lambda_args = ["new_object"] elif resource_type == 'read' or resource_type == 'delete': + lambda_args = ["object_ref"] group_ref = q.select(current_group_field, q.select('data', q.get(q.var('object_ref')))) - lambda_args = ["object_ref"] return q.query( 
q.lambda_( diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_custom_user_group_group_perms.py similarity index 51% rename from pfunk/tests/test_group_subclass.py rename to pfunk/tests/test_custom_user_group_group_perms.py index d179861..2f36603 100644 --- a/pfunk/tests/test_group_subclass.py +++ b/pfunk/tests/test_custom_user_group_group_perms.py @@ -12,21 +12,21 @@ class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_group_subclass.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_group_perms.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): - group_class = import_util('pfunk.tests.test_group_subclass.Newgroup') + group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_group_subclass.Newgroup', relation_name='custom_users_groups') + 'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') class Blog(Collection): collection_roles = [GenericGroupBasedRole] title = StringField(required=True) content = StringField(required=True) - group = ReferenceField('pfunk.tests.test_group_subclass.Newgroup', + group = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='newgroup_blogs') def __unicode__(self): @@ -38,6 +38,10 @@ class TestCustomGroupBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: + os.environ['USER_COLLECTION'] = 'Newuser' + os.environ['GROUP_COLLECTION'] = 'Newgroup' + os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test_user', email='tlasso@example.org', first_name='Ted', @@ -50,18 +54,18 @@ def setUp(self) -> None: self.blog = Blog.create( title='test_blog', content='test content', group=self.group) - # def test_read(self): - # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - # self.assertEqual("test_blog", res.json['data']['data']['title']) + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) def test_create(self): self.assertNotIn("new blog", [ @@ -69,33 +73,35 @@ def test_create(self): res = self.c.post('/json/blog/create/', json={ "title": "new blog", - "content": "I created a new blog."}, + "content": "I created a new blog.", + "group": self.group.ref.id()}, headers={ "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) self.assertIn("new blog", [ blog.title for blog in Blog.all()]) - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": 
"updated blog", - # "content": "I updated my blog."}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ - # blog.title for blog in Blog.all()]) - - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) - - # self.assertTrue(res.status_code, 200) + def test_update(self): + self.assertNotIn("the updated street somewhere", [ + house.address for house in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog."}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_custom_user_group_users_perms.py similarity index 81% rename from pfunk/tests/test_user_subclass.py rename to pfunk/tests/test_custom_user_group_users_perms.py index 46eb5ad..45495ec 100644 --- a/pfunk/tests/test_user_subclass.py +++ b/pfunk/tests/test_custom_user_group_users_perms.py @@ -12,16 +12,16 @@ class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_user_subclass.UserGroups') - group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') + user_group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.UserGroups') + group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog', + 'pfunk.tests.test_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Blog', relation_name='users_blogs') @@ -29,7 +29,7 @@ class Blog(Collection): collection_roles = [GenericUserBasedRole] title = StringField(required=True) content = StringField(required=True) - user = ReferenceField('pfunk.tests.test_user_subclass.Newuser', + user = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -37,10 +37,14 @@ def __unicode__(self): # Test case to see if user-group is working -class TestUserGroupError(APITestCase): +class TestCustomUserBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: + os.environ['USER_COLLECTION'] = 'Newuser' + os.environ['GROUP_COLLECTION'] = 'Newgroup' + os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', 
From 20163e3d1ae1834a4f5f12602dad8f8064130d49 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 1 Dec 2022 18:32:55 +0800 Subject: [PATCH 106/214] Added genericM2MUser perms --- pfunk/contrib/auth/resources.py | 142 ++++++++++++++++++-------------- pfunk/tests/test_m2m.py | 109 ++++++++++++++++++++++++ 2 files changed, 189 insertions(+), 62 deletions(-) create mode 100644 pfunk/tests/test_m2m.py diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 7cf9252..00848ea 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -391,65 +391,83 @@ def get_lambda(self, resource_type): ) -# class GenericUserBasedRoleM2M(GenericAuthorizationRole): -# """ Generic set of permissions for many-to-many entity to user relationship """ - -# def get_name_suffix(self): -# # TODO: return suffix: -# return f'{self.get_group_table().lower()}_based_crud_role' - -# def get_relation_index_name(self): -# # TODO: return index name: `users_blogs_by_blog_and_newuser` -# """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ -# user_col = self.get_user_collection() -# user_groups = user_col._base_properties.get("groups") -# group_table = self.get_group_table().lower() -# if group_table: -# relation_index_name = (user_groups.relation_name -# + '_by_' -# + group_table -# + '_and_' -# + self.get_user_table().lower()) -# return relation_index_name -# return None - -# def get_lambda(self, resource_type): -# # TODO: refactor to look for the M2M index and see if the user has permission for the entity -# current_user_field = self.collection.get_user_field() -# if resource_type == 'write': -# lambda_args = ["old_object", "new_object", "object_ref"] -# user_ref = q.select(current_user_field, -# q.select('data', q.var('old_object'))) -# return q.query( -# q.lambda_(lambda_args, -# q.and_( -# q.equals( -# user_ref, -# q.current_identity() -# ), -# q.equals( -# q.select(current_user_field, q.select( -# 'data', q.var('new_object'))), -# q.current_identity() -# ) -# ) - -# ) -# ) -# elif resource_type == 'create': -# lambda_args = ["new_object"] -# user_ref = q.select(current_user_field, -# q.select('data', q.var('new_object'))) -# elif resource_type == 'read' or resource_type == 'delete': -# lambda_args = ["object_ref"] -# user_ref = q.select(current_user_field, -# q.select('data', q.get(q.var('object_ref')))) - -# return q.query( -# q.lambda_(lambda_args, -# q.equals( -# user_ref, -# q.current_identity() -# ) -# ) -# ) +class GenericUserBasedRoleM2M(GenericAuthorizationRole): + """ Generic set of permissions for many-to-many entity to user relationship """ + + def get_name_suffix(self): + return f'{self.collection.get_user_field().lower()}_based_crud_role' + + def get_relation_index_name(self): + """ Returns the index name of the m2m index of group and user e.g. 
'users_blogs_by_blog_and_newuser' """ + user_field = self.collection.get_user_field() + if user_field: + user_field = user_field.lower() + else: + return None + user_col = self.collection._base_properties.get(user_field) + user_col_relation = user_col.relation_name + + group_table = self.get_group_table().lower() + if group_table: + relation_index_name = (user_col_relation + + '_by_' + + self.collection.get_collection_name().lower() + + '_and_' + + self.get_user_table().lower()) + return relation_index_name + return None + + + def get_lambda(self, resource_type): + current_user_field = self.collection.get_user_field() + if resource_type == 'write': + lambda_args = ["old_object", "new_object", "object_ref"] + obj_ref = q.var('old_object') + # BUG: Returning error 'NoneType' object has no attribute 'relation_field' + return q.query( + q.lambda_(lambda_args, + q.and_( + q.equals( + q.select(f'{USER_CLASS.lower()}ID', + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), + q.current_identity() + ), + q.equals( + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), + q.current_identity() + ) + ) + ) + ) + elif resource_type == 'create': + lambda_args = ["new_object"] + obj_ref = q.var('new_object') + elif resource_type == 'read' or resource_type == 'delete': + lambda_args = ["object_ref"] + obj_ref = q.var('object_ref') + + return q.query( + q.lambda_( + lambda_args, + q.equals( + q.select(f'{USER_CLASS.lower()}ID', + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), + q.current_identity() + ) + ) + ) diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_m2m.py new file mode 100644 index 0000000..a200517 --- /dev/null +++ b/pfunk/tests/test_m2m.py @@ -0,0 +1,109 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, GenericUserBasedRoleM2M + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_m2m.UserGroups') + group_class = import_util('pfunk.tests.test_m2m.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_m2m.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_m2m.Blog', + relation_name='users_blogs') + + +class Blog(Collection): + collection_roles = [GenericUserBasedRoleM2M] + title = StringField(required=True) + content = StringField(required=True) + users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + relation_name='users_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestCustomUserM2M(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + os.environ['USER_COLLECTION'] = 'Newuser' + os.environ['GROUP_COLLECTION'] = 'Newgroup' + 
os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newgroup' + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', users=[self.user], token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog.", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # print(f'\n\nRESPONSE: {res.json}\n\n') + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) From f6997bb4153095ae46a4076ba7cdfd3d8cab94d8 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 12 Dec 2022 10:39:19 +0800 Subject: [PATCH 107/214] Fixed create permissions on m2m operations --- pfunk/collection.py | 2 +- pfunk/contrib/auth/resources.py | 9 +- .../test_custom_user_group_users_perms.py | 4 +- pfunk/tests/test_m2m.py | 90 ++++++++++--------- pfunk/web/views/json.py | 43 ++++----- 5 files changed, 75 insertions(+), 73 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 3af0c40..44e14a8 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -81,7 +81,7 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass): def __str__(self): try: return self.__unicode__() # pragma: no cover - except AttributeError: + except (AttributeError, TypeError): return f"{self.__class__.__name__} object" # pragma: no cover diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 00848ea..2a8be01 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -212,7 +212,10 @@ def get_privileges(self): { "resource": q.index(self.get_relation_index_name()), "actions": { - "read": True + "read": True, + "create": True, + "update": True, + "delete": True } }, { @@ -448,8 +451,8 @@ def get_lambda(self, resource_type): ) ) elif resource_type == 'create': - lambda_args = ["new_object"] - obj_ref = q.var('new_object') + # Create 
ops will always be allowed + return True elif resource_type == 'read' or resource_type == 'delete': lambda_args = ["object_ref"] obj_ref = q.var('object_ref') diff --git a/pfunk/tests/test_custom_user_group_users_perms.py b/pfunk/tests/test_custom_user_group_users_perms.py index 45495ec..fb2c58c 100644 --- a/pfunk/tests/test_custom_user_group_users_perms.py +++ b/pfunk/tests/test_custom_user_group_users_perms.py @@ -83,8 +83,8 @@ def test_create(self): blog.title for blog in Blog.all()]) def test_update(self): - self.assertNotIn("the updated street somewhere", [ - house.address for house in Blog.all()]) + self.assertNotIn("updated blog", [ + blog.title for blog in Blog.all()]) res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', json={ "title": "updated blog", diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_m2m.py index a200517..5eecedd 100644 --- a/pfunk/tests/test_m2m.py +++ b/pfunk/tests/test_m2m.py @@ -50,60 +50,64 @@ def setUp(self) -> None: self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) + self.user2 = Newuser.create(username='test2', email='tlasso2@example.org', first_name='Juliuz', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) self.blog = Blog.create( title='test_blog', content='test content', users=[self.user], token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") - def test_read(self): - res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token}) - self.assertTrue(res.status_code, 200) - self.assertEqual("test_blog", res.json['data']['data']['title']) - - def test_read_all(self): - res = self.c.get(f'/json/blog/list/', - headers={ - "Authorization": self.token}) - self.assertTrue(res.status_code, 200) - - def test_create(self): - self.assertNotIn("new blog", [ - blog.title for blog in Blog.all()]) - res = self.c.post('/json/blog/create/', - json={ - "title": "new blog", - "content": "I created a new blog.", - "user": self.user.ref.id()}, - headers={ - "Authorization": self.token}) - - self.assertTrue(res.status_code, 200) - self.assertIn("new blog", [ - blog.title for blog in Blog.all()]) + # def test_read(self): + # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + # self.assertEqual("test_blog", res.json['data']['data']['title']) - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": "updated blog", - # "content": "I updated my blog.", - # "user": self.user.ref.id()}, + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', # headers={ # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "users": [self.user.ref.id(), self.user2.ref.id()]}, + # headers={ + # "Authorization": self.token}) - # print(f'\n\nRESPONSE: {res.json}\n\n') # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ + # self.assertIn("new blog", [ # blog.title for blog in Blog.all()]) - def test_delete(self): - res = 
self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) + def test_update(self): + self.assertNotIn("updated blog", [ + blog.title for blog in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog.", + "users": [self.user.ref.id()] + }, + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.status_code, 200) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 93b957a..b6ea2f9 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -1,3 +1,5 @@ +from valley.utils import import_util + from pfunk.client import q from pfunk.web.response import JSONResponse, JSONNotFoundResponse, JSONBadRequestResponse, \ JSONMethodNotAllowedResponse, JSONUnauthorizedResponse, JSONForbiddenResponse @@ -35,32 +37,25 @@ class CreateView(UpdateMixin, JSONActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ - obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) - return obj - - def get_m2m_kwargs(self, obj): - """ Acquires the keyword-arguments for the many-to-many relationship - - FaunaDB is only able to create a many-to-many relationship - by creating a collection that references both of the object. - So, when creating an entity, it is needed to create an entity to - make them related to each other. 
- - Args: - obj (dict, required): - - """ - data = self.request.get_json() + data = self.get_query_kwargs() fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') for k, v in fields.items(): - current_value = data.get(k) - col = v.get('foreign_class')() - client = col().client() - client.query( - q.create( - - ) - ) + col = import_util(v['foreign_class']) + entities = [] + for ref in data[k]: + c = col.get(ref) + + # # TODO: instantiate collection by just passsing the ref + # col_data = {'_ref': ref} + # c = col(**col_data) + # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') + # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) + # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') + # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') + entities.append(c) + data[k] = entities + obj = self.collection.create(**data, _token=self.request.token) + return obj class UpdateView(UpdateMixin, JSONIDMixin, JSONView): From 070c7848528039bd772bc53545e182b4408ab0aa Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 12 Dec 2022 11:25:26 +0800 Subject: [PATCH 108/214] Fixed update ops on m2m not working when adding m2m entities --- pfunk/contrib/auth/resources.py | 1 - pfunk/tests/test_m2m.py | 69 ++++++++++++++++----------------- pfunk/web/views/json.py | 20 +++++++++- 3 files changed, 53 insertions(+), 37 deletions(-) diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 2a8be01..38395f6 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -426,7 +426,6 @@ def get_lambda(self, resource_type): if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] obj_ref = q.var('old_object') - # BUG: Returning error 'NoneType' object has no attribute 'relation_field' return q.query( q.lambda_(lambda_args, q.and_( diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_m2m.py index 5eecedd..b1520a4 100644 --- a/pfunk/tests/test_m2m.py +++ b/pfunk/tests/test_m2m.py @@ -58,33 +58,33 @@ def setUp(self) -> None: self.token, self.exp = Newuser.api_login("test", "abc123") - # def test_read(self): - # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - # self.assertEqual("test_blog", res.json['data']['data']['title']) - - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "users": [self.user.ref.id(), self.user2.ref.id()]}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", 
+ "content": "I created a new blog.", + "users": [self.user.ref.id(), self.user2.ref.id()]}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) def test_update(self): self.assertNotIn("updated blog", [ @@ -98,16 +98,15 @@ def test_update(self): headers={ "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) self.assertIn("updated blog", [ blog.title for blog in Blog.all()]) - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) - # self.assertTrue(res.status_code, 200) + self.assertTrue(res.status_code, 200) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index b6ea2f9..d6c25c3 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -66,8 +66,26 @@ class UpdateView(UpdateMixin, JSONIDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ + data = self.get_query_kwargs() obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) - obj._data.update(self.get_query_kwargs()) + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + col = import_util(v['foreign_class']) + entities = [] + for ref in data[k]: + c = col.get(ref) + + # # TODO: instantiate collection by just passsing the ref + # col_data = {'_ref': ref} + # c = col(**col_data) + # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') + # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) + # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') + # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') + entities.append(c) + data[k] = entities + + obj._data.update(data) obj.save() return obj From 982f0afa27022500b4a59445acc74d721c66793f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 15 Dec 2022 11:35:44 +0800 Subject: [PATCH 109/214] Found a better fix for permission error on m2m relation saving --- pfunk/collection.py | 4 +- pfunk/contrib/auth/resources.py | 29 ++++++++++-- ...t_m2m.py => test_custom_user_group_m2m.py} | 1 + pfunk/web/views/json.py | 47 ++++++------------- 4 files changed, 41 insertions(+), 40 deletions(-) rename pfunk/tests/{test_m2m.py => test_custom_user_group_m2m.py} (99%) diff --git a/pfunk/collection.py b/pfunk/collection.py index 44e14a8..3171507 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -1,5 +1,5 @@ from envs import env -from faunadb.errors import BadRequest +from faunadb.errors import BadRequest, Unauthorized, PermissionDenied from valley.contrib import Schema from valley.declarative import DeclaredVars, DeclarativeVariablesMetaclass from valley.properties import BaseProperty, CharProperty, ListProperty @@ -423,7 +423,7 @@ def _save_related(self, relational_data, _token=None) -> None: } ) ) - except BadRequest: + except (BadRequest) as err: pass def call_signals(self, name): diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 38395f6..01f04e2 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,3 +1,4 @@ +from valley.utils import import_util from tokenize import group from envs import env 
@@ -212,10 +213,7 @@ def get_privileges(self): { "resource": q.index(self.get_relation_index_name()), "actions": { - "read": True, - "create": True, - "update": True, - "delete": True + "read": True } }, { @@ -397,11 +395,32 @@ def get_lambda(self, resource_type): class GenericUserBasedRoleM2M(GenericAuthorizationRole): """ Generic set of permissions for many-to-many entity to user relationship """ + def get_privileges(self): + """ Usage of parent `get_privileges()` with addition of access to M2M collection """ + priv_list = super().get_privileges() + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + foreign_col = self.collection._base_properties.get(k) + relation_name = foreign_col.relation_name + if relation_name: + priv_list.extend([ + { + "resource": q.collection(relation_name), + "actions": { + 'read': True, + 'create': True, + 'update': False, + 'delete': False + } + } + ]) + return priv_list + def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' def get_relation_index_name(self): - """ Returns the index name of the m2m index of group and user e.g. 'users_blogs_by_blog_and_newuser' """ + """ Returns the index name of the m2m index of an entity and user e.g. 'users_blogs_by_blog_and_newuser' """ user_field = self.collection.get_user_field() if user_field: user_field = user_field.lower() diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_custom_user_group_m2m.py similarity index 99% rename from pfunk/tests/test_m2m.py rename to pfunk/tests/test_custom_user_group_m2m.py index b1520a4..3d7f2d1 100644 --- a/pfunk/tests/test_m2m.py +++ b/pfunk/tests/test_custom_user_group_m2m.py @@ -82,6 +82,7 @@ def test_create(self): headers={ "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) self.assertIn("new blog", [ blog.title for blog in Blog.all()]) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index d6c25c3..d663584 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -28,6 +28,18 @@ def get_response(self): headers=self.get_headers() ) + def get_req_with_m2m(self, data): + """ Returns request with updated params that has the proper m2m entities """ + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + col = import_util(v['foreign_class']) + entities = [] + for ref in data[k]: + c = col.get(ref) + entities.append(c) + data[k] = entities + return data + class CreateView(UpdateMixin, JSONActionMixin, JSONView): """ Define a `Create` view that allows `creation` of an entity in the collection """ @@ -38,22 +50,7 @@ class CreateView(UpdateMixin, JSONActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ data = self.get_query_kwargs() - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') - for k, v in fields.items(): - col = import_util(v['foreign_class']) - entities = [] - for ref in data[k]: - c = col.get(ref) - - # # TODO: instantiate collection by just passsing the ref - # col_data = {'_ref': ref} - # c = col(**col_data) - # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') - # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) - # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') - # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') - entities.append(c) - data[k] = entities + data = self.get_req_with_m2m(data) obj = self.collection.create(**data, 
_token=self.request.token) return obj @@ -67,24 +64,8 @@ class UpdateView(UpdateMixin, JSONIDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ data = self.get_query_kwargs() + data = self.get_req_with_m2m(data) obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') - for k, v in fields.items(): - col = import_util(v['foreign_class']) - entities = [] - for ref in data[k]: - c = col.get(ref) - - # # TODO: instantiate collection by just passsing the ref - # col_data = {'_ref': ref} - # c = col(**col_data) - # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') - # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) - # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') - # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') - entities.append(c) - data[k] = entities - obj._data.update(data) obj.save() return obj From 50422c52c69c8cd40e4682bbf868f4265b631fbb Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 15 Dec 2022 11:40:55 +0800 Subject: [PATCH 110/214] Fixed wrong imports on test m2m --- pfunk/tests/test_custom_user_group_m2m.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/pfunk/tests/test_custom_user_group_m2m.py b/pfunk/tests/test_custom_user_group_m2m.py index 3d7f2d1..a92663e 100644 --- a/pfunk/tests/test_custom_user_group_m2m.py +++ b/pfunk/tests/test_custom_user_group_m2m.py @@ -12,16 +12,16 @@ class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_m2m.UserGroups') - group_class = import_util('pfunk.tests.test_m2m.Newgroup') + user_group_class = import_util('pfunk.tests.test_custom_user_group_m2m.UserGroups') + group_class = import_util('pfunk.tests.test_custom_user_group_m2m.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_m2m.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_m2m.Blog', + 'pfunk.tests.test_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Blog', relation_name='users_blogs') @@ -29,7 +29,7 @@ class Blog(Collection): collection_roles = [GenericUserBasedRoleM2M] title = StringField(required=True) content = StringField(required=True) - users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -43,8 +43,8 @@ class TestCustomUserM2M(APITestCase): def setUp(self) -> None: os.environ['USER_COLLECTION'] = 'Newuser' os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newgroup' + os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', @@ -82,7 +82,6 @@ def test_create(self): headers={ "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') 
self.assertTrue(res.status_code, 200) self.assertIn("new blog", [ blog.title for blog in Blog.all()]) From c5d359fb246203278c1852c59038d2052b9e8f26 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 23 Dec 2022 10:26:12 +0800 Subject: [PATCH 111/214] updated unittests to work with latest contrib collections --- pfunk/contrib/auth/collections.py | 2 +- pfunk/contrib/auth/resources.py | 2 +- pfunk/project.py | 5 +- pfunk/tests/test_auth.py | 5 +- .../test_custom_user_group_group_perms.py | 3 +- pfunk/tests/test_deployment.py | 9 +- pfunk/tests/test_email.py | 7 +- pfunk/tests/test_jwt.py | 5 +- pfunk/tests/test_user_subclass_m2m.py | 116 ------------------ pfunk/tests/test_web_crud.py | 5 +- pfunk/tests/test_web_json_change_password.py | 5 +- pfunk/tests/test_web_json_crud.py | 5 +- pfunk/tests/test_web_json_forgot_password.py | 5 +- pfunk/tests/test_web_json_login.py | 5 +- pfunk/tests/test_web_json_signup.py | 5 +- pfunk/tests/test_web_json_stripe.py | 9 +- 16 files changed, 32 insertions(+), 161 deletions(-) delete mode 100644 pfunk/tests/test_user_subclass_m2m.py diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 2d64d5f..8ce669b 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -393,7 +393,7 @@ def add_permissions(self, group, permissions: list, _token=None): index_name = 'usergroups_by_userID_and_groupID' for i in permissions: - perm_list.append(i) + perm_list.extend(i.permissions) if not self.user_group_class: raise NotImplementedError diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 01f04e2..f637f10 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -327,7 +327,7 @@ def get_lambda(self, resource_type): 2. 
You have the create permission to perform the action (create, read, write, and delete) """ current_group_field = self.collection.get_group_field().lower() - perm = f'{resource_type}'.lower() + perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, diff --git a/pfunk/project.py b/pfunk/project.py index 54a6469..4d11507 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -205,10 +205,7 @@ def publish(self, mode: str = 'merge') -> int: auth=BearerAuth(secret), data=gql_io ) - - print(f'\n') - print(self.render()) - print('----------------------------------------\n') + if resp.status_code == 200: test_mode = env('PFUNK_TEST_MODE', False, var_type='boolean') if not test_mode: diff --git a/pfunk/tests/test_auth.py b/pfunk/tests/test_auth.py index 9162aa4..91b2e47 100644 --- a/pfunk/tests/test_auth.py +++ b/pfunk/tests/test_auth.py @@ -1,15 +1,14 @@ from faunadb.errors import PermissionDenied from pfunk.contrib.auth.key import PermissionGroup -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.exceptions import LoginFailed from pfunk.testcase import CollectionTestCase from pfunk.tests import Sport, Person, House class AuthTestCase(CollectionTestCase): - collections = [User, Group, + collections = [User, Group, UserGroups, Sport, Person, House] def setUp(self) -> None: diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_custom_user_group_group_perms.py index 2f36603..6f5dd33 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ b/pfunk/tests/test_custom_user_group_group_perms.py @@ -4,6 +4,7 @@ from valley.utils import import_util from pprint import pprint as p +from pfunk.contrib.auth.key import PermissionGroup from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups from pfunk.testcase import APITestCase from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField @@ -48,7 +49,7 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) perms = self.user.add_permissions( - self.group, ['create', 'read', 'write', 'delete']) + self.group, [PermissionGroup(Blog, ['create', 'read', 'write', 'delete'])]) self.token, self.exp = Newuser.api_login("test_user", "abc123") self.raw_token = Newuser.login("test_user", "abc123") self.blog = Blog.create( diff --git a/pfunk/tests/test_deployment.py b/pfunk/tests/test_deployment.py index b354061..a85ae03 100644 --- a/pfunk/tests/test_deployment.py +++ b/pfunk/tests/test_deployment.py @@ -1,6 +1,5 @@ from pfunk.client import q -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.project import Project from pfunk.testcase import PFunkTestCase from pfunk.tests import Sport, Person @@ -11,7 +10,7 @@ class DeploymentTestCase(PFunkTestCase): def setUp(self) -> None: super(DeploymentTestCase, self).setUp() self.project = Project() - self.project.add_resources([User, Group, Sport, Person]) + self.project.add_resources([User, Group, Sport, Person, UserGroups]) def test_project_publish(self): # Make sure collections are created @@ -24,7 +23,7 @@ def test_project_publish(self): q.paginate(q.collections(q.database(self.db_name))) 
).get('data') - self.assertEqual(5, len(collections_after)) + self.assertEqual(6, len(collections_after)) # Make sure functions are created functions = self.client.query( q.paginate(q.functions(q.database(self.db_name))) @@ -36,7 +35,7 @@ def test_project_publish(self): indexes = self.client.query( q.paginate(q.indexes(q.database(self.db_name))) ).get('data') - self.assertEqual(13, len(indexes)) + self.assertEqual(15, len(indexes)) # Add User and Group to the project self.project.add_resources([User, Group]) # Publish twice more to make sure there are no errors with create_or_update_role or create_or_update_function diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index 27e899c..a841463 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ -4,15 +4,14 @@ from jinja2.exceptions import TemplateNotFound from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.contrib.email.base import EmailBackend from pfunk.contrib.email.ses import SESBackend from pfunk.testcase import APITestCase class TestEmailBackend(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestEmailBackend, self).setUp() @@ -44,7 +43,7 @@ def test_get_body(self): class TestEmailSES(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestEmailSES, self).setUp() diff --git a/pfunk/tests/test_jwt.py b/pfunk/tests/test_jwt.py index 15c3bdd..7d57c7f 100644 --- a/pfunk/tests/test_jwt.py +++ b/pfunk/tests/test_jwt.py @@ -1,12 +1,11 @@ from pfunk.contrib.auth.key import Key -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase class AuthToken(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(AuthToken, self).setUp() diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py deleted file mode 100644 index a3d4972..0000000 --- a/pfunk/tests/test_user_subclass_m2m.py +++ /dev/null @@ -1,116 +0,0 @@ -# test_dev.py - a placeholder test for fixing User - Group circular import errors - -import os -from valley.utils import import_util -from pprint import pprint as p - -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser -from pfunk.testcase import APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole - - -class UserGroups(Collection): - collection_name = 'users_groups' - userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') - groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') - permissions = ListField() - - -class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', - relation_name='custom_users_groups') - blogs = ReferenceField('pfunk.tests.test_user_subclass.Blog', - relation_name='newgroup_blogs') - - - -class Newuser(ExtendedUser): - user_group_class = 
import_util('pfunk.tests.test_user_subclass.UserGroups') - group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') - groups = ManyToManyField( - 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') - - -class Blog(Collection): - collection_roles = [GenericUserBasedRole] - title = StringField(required=True) - content = StringField(required=True) - group = ReferenceField('pfunk.tests.test_user_subclass.Newgroup', - relation_name='newgroup_blogs') - - def __unicode__(self): - return self.title - - -# Test case to see if user-group is working -class TestUserGroupError(APITestCase): - collections = [Newuser, Newgroup, UserGroups, Blog] - - def setUp(self) -> None: - super().setUp() - self.group = Newgroup.create(name='Power Users', slug='power-users') - self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) - p(f'@Test Setup: User Created: {self.user.__dict__}') - self.blog = Blog.create( - title='test_blog', content='test content', group=[self.group], token=self.secret) - self.token, self.exp = Newuser.api_login("test", "abc123") - # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') - # p(f'@Test Setup: User Created: {self.user.__dict__}') - - - def test_read(self): - res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') - self.assertTrue(res.status_code, 200) - self.assertEqual("test_blog", res.json['data']['data']['title']) - - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "user": self.user.ref.id()}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) - - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": "updated blog", - # "content": "I updated my blog.", - # "user": self.user.ref.id()}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ - # blog.title for blog in Blog.all()]) - - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) - - # self.assertTrue(res.status_code, 200) diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py index 065f56e..30f9f3a 100644 --- a/pfunk/tests/test_web_crud.py +++ b/pfunk/tests/test_web_crud.py @@ -1,13 +1,12 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase from pfunk.tests import House class TestWebCrud(APITestCase): - collections = [User, Group, House] + collections = [User, Group, UserGroups, House] def setUp(self) -> None: super(TestWebCrud, self).setUp() diff --git 
a/pfunk/tests/test_web_json_change_password.py b/pfunk/tests/test_web_json_change_password.py index 6b9a9f0..cb8dae0 100644 --- a/pfunk/tests/test_web_json_change_password.py +++ b/pfunk/tests/test_web_json_change_password.py @@ -1,10 +1,9 @@ -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase class TestWebChangePassword(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebChangePassword, self).setUp() diff --git a/pfunk/tests/test_web_json_crud.py b/pfunk/tests/test_web_json_crud.py index 48d77bf..6179e41 100644 --- a/pfunk/tests/test_web_json_crud.py +++ b/pfunk/tests/test_web_json_crud.py @@ -1,13 +1,12 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase from pfunk.tests import House class TestWebCrud(APITestCase): - collections = [User, Group, House] + collections = [User, Group, House, UserGroups] def setUp(self) -> None: super(TestWebCrud, self).setUp() diff --git a/pfunk/tests/test_web_json_forgot_password.py b/pfunk/tests/test_web_json_forgot_password.py index 64f8603..6dbdaa2 100644 --- a/pfunk/tests/test_web_json_forgot_password.py +++ b/pfunk/tests/test_web_json_forgot_password.py @@ -1,12 +1,11 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase class TestWebForgotPassword(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebForgotPassword, self).setUp() diff --git a/pfunk/tests/test_web_json_login.py b/pfunk/tests/test_web_json_login.py index f67ce73..862b7d0 100644 --- a/pfunk/tests/test_web_json_login.py +++ b/pfunk/tests/test_web_json_login.py @@ -1,13 +1,12 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase class TestWebLogin(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebLogin, self).setUp() diff --git a/pfunk/tests/test_web_json_signup.py b/pfunk/tests/test_web_json_signup.py index b175ee2..0964414 100644 --- a/pfunk/tests/test_web_json_signup.py +++ b/pfunk/tests/test_web_json_signup.py @@ -1,12 +1,11 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase class TestWebSignup(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebSignup, self).setUp() diff --git a/pfunk/tests/test_web_json_stripe.py b/pfunk/tests/test_web_json_stripe.py index 1158a1d..be45dec 100644 --- a/pfunk/tests/test_web_json_stripe.py +++ b/pfunk/tests/test_web_json_stripe.py @@ -3,8 +3,7 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from 
pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.contrib.auth.key import PermissionGroup from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer from pfunk.contrib.ecommerce.views import BaseWebhookView @@ -13,7 +12,7 @@ class TestWebStripeCrud(APITestCase): - collections = [User, Group, StripePackage, StripeCustomer] + collections = [User, Group, UserGroups, StripePackage, StripeCustomer] def setUp(self) -> None: super(TestWebStripeCrud, self).setUp() @@ -177,7 +176,7 @@ def test_delete_customer(self): class TestStripeWebhook(APITestCase): - collections = [User, Group, StripeCustomer] + collections = [User, Group, UserGroups, StripeCustomer] def setUp(self) -> None: super(TestStripeWebhook, self).setUp() @@ -262,7 +261,7 @@ def test_receive_post_req(self, mocked): class TestStripeCheckoutView(APITestCase): - collections = [User, Group, StripePackage] + collections = [User, Group, UserGroups, StripePackage] def setUp(self) -> None: super(TestStripeCheckoutView, self).setUp() From c2fa09588f7a736128e5782d3ee7682d0988ba38 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 20 Jan 2023 16:16:46 +0800 Subject: [PATCH 112/214] first stab at using class var instead of env var initially when accessing custom user and group class --- pfunk/collection.py | 11 +- pfunk/contrib/auth/collections.py | 19 ++- pfunk/contrib/auth/resources.py | 17 +-- pfunk/project.py | 7 +- .../test_custom_user_group_group_perms.py | 127 +++++++++--------- pfunk/tests/test_sandbox.py | 38 ++++++ pfunk/tests/unittest_keys.py | 2 + 7 files changed, 139 insertions(+), 82 deletions(-) create mode 100644 pfunk/tests/test_sandbox.py create mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/collection.py b/pfunk/collection.py index 3171507..f728c01 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -78,6 +78,12 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass): 'collection_name'] """List of class variables that are not allowed a field names. """ + """ Optional in-line definition user and group class """ + user_collection = None + group_collection = None + user_collection_dir = None + group_collection_dir = None + def __str__(self): try: return self.__unicode__() # pragma: no cover @@ -132,7 +138,7 @@ def get_user_field(self) -> str: user will be used. 
This is to ensure the permissions to work properly """ fields = self._base_properties.items() - user_class = env('USER_COLLECTION', 'User') + user_class = self.user_collection or env('USER_COLLECTION', 'User') user_field = None for k, v in fields: if user_class in v.get_graphql_type(): @@ -148,7 +154,8 @@ def get_group_field(self) -> str: """ fields = self._base_properties.items() - group_class = env('GROUP_COLLECTION', 'Group') + # TODO: fix not being able to acquire self.group_collection properly and taking env default -> Group instead + group_class = self.group_collection or env('GROUP_COLLECTION', 'Group') group_field = None for k, v in fields: if group_class in v.get_graphql_type(): diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 8ce669b..80c9c2a 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -1,5 +1,6 @@ from cmath import log import uuid +import os from envs import env from faunadb.errors import BadRequest @@ -42,7 +43,16 @@ class UserGroupByUserAndGroupIndex(Index): ] -class UserGroups(Collection): +class BaseUserGroup(Collection): + """ Base UserGroup Collection to subclass from when using custom User and Group """ + collection_indexes = [UserGroupByUserAndGroupIndex] + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + +class UserGroups(BaseUserGroup): """ Many-to-many collection of the user-group relationship The native fauna-way of holding many-to-many relationship @@ -63,15 +73,10 @@ class UserGroups(Collection): permissions (str[]): List of permissions, `['create', 'read', 'delete', 'write']` """ - collection_indexes = [UserGroupByUserAndGroupIndex] userID = ReferenceField( env('USER_COLLECTION_DIR', 'pfunk.contrib.auth.collections.User')) groupID = ReferenceField( env('GROUP_COLLECTION_DIR', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) @@ -318,7 +323,7 @@ class ExtendedUser(BaseUser): Provides base methods for group-user permissions. 
If there are no supplied `groups` property, will raise `NotImplementedErrror` """ - user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') + # user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') @classmethod def get_permissions(cls, ref, _token=None): diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index f637f10..36c2064 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -6,8 +6,8 @@ from pfunk.resources import Function, Role # Global collections -USER_CLASS = env('USER_COLLECTION', 'User') -GROUP_CLASS = env('GROUP_COLLECTION', 'Group') +# USER_CLASS = env('USER_COLLECTION', 'User') +# GROUP_CLASS = env('GROUP_COLLECTION', 'Group') class AuthFunction(Function): @@ -188,10 +188,10 @@ def get_relation_index_name(self): return 'usergroups_by_userID_and_groupID' def get_user_table(self): - return USER_CLASS + return self.collection.user_collection or env('USER_COLLECTION', 'User') def get_group_table(self): - return GROUP_CLASS + return self.collection.group_collection or env('GROUP_COLLECTION', 'Group') def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' @@ -312,12 +312,9 @@ def get_lambda(self, resource_type): class GenericGroupBasedRole(GenericAuthorizationRole): permissions_field = 'permissions' - user_table = USER_CLASS - group_table = GROUP_CLASS - through_user_field = USER_CLASS.lower() + 'ID' def get_name_suffix(self): - return f'{self.group_table.lower()}_based_crud_role' + return f'{self.get_group_table().lower()}_based_crud_role' def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -449,7 +446,7 @@ def get_lambda(self, resource_type): q.lambda_(lambda_args, q.and_( q.equals( - q.select(f'{USER_CLASS.lower()}ID', + q.select(f'{self.get_user_table().lower()}ID', q.select("data", q.get(q.match( q.index( @@ -479,7 +476,7 @@ def get_lambda(self, resource_type): q.lambda_( lambda_args, q.equals( - q.select(f'{USER_CLASS.lower()}ID', + q.select(f'{self.get_user_table().lower()}ID', q.select("data", q.get(q.match( q.index( diff --git a/pfunk/project.py b/pfunk/project.py index 4d11507..b561059 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -17,7 +17,7 @@ from pfunk.web.request import HTTPRequest, RESTRequest, WSGIRequest from pfunk.web.response import HttpNotFoundResponse, JSONMethodNotAllowedResponse -from .contrib.auth.collections import User, Group, UserGroups, BaseGroup, BaseUser, ExtendedUser +from .contrib.auth.collections import User, Group, UserGroups, BaseGroup, BaseUser, ExtendedUser, BaseUserGroup from .collection import Collection from .fields import ForeignList from .template import graphql_template @@ -193,6 +193,8 @@ def publish(self, mode: str = 'merge') -> int: """ gql_io = BytesIO(self.render().encode()) + print(f'\n\nPUBLISHING PROJ...\n\n') + print(f'\n\n{self.render()}\n\n') if self.client: secret = self.client.secret @@ -224,7 +226,8 @@ def publish(self, mode: str = 'merge') -> int: or issubclass(col, BaseGroup) or issubclass(col, ExtendedUser) or issubclass(col, BaseUser) - or issubclass(col, UserGroups)): + or issubclass(col, UserGroups) + or issubclass(col, BaseUserGroup)): col.publish() collections.remove(col) for col in collections: diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_custom_user_group_group_perms.py index 6f5dd33..858bc9c 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ 
b/pfunk/tests/test_custom_user_group_group_perms.py @@ -1,15 +1,20 @@ -# test_dev.py - a placeholder test for fixing User - Group circular import errors - -import os from valley.utils import import_util from pprint import pprint as p +from unittest import mock +from envs import env +from importlib import reload from pfunk.contrib.auth.key import PermissionGroup -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups from pfunk.testcase import APITestCase +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole +from pfunk.contrib.auth.resources import GenericGroupBasedRole + + +class UserGroups(ug): + userID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup') class Newgroup(BaseGroup): @@ -17,13 +22,11 @@ class Newgroup(BaseGroup): relation_name='custom_users_groups') -class Newuser(ExtendedUser): - group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') - groups = ManyToManyField( - 'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') - - class Blog(Collection): + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' collection_roles = [GenericGroupBasedRole] title = StringField(required=True) content = StringField(required=True) @@ -34,15 +37,17 @@ def __unicode__(self): return self.title -# Test case to see if user-group is working +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.UserGroups') + group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') + + class TestCustomGroupBasedPerms(APITestCase): - collections = [Newuser, Newgroup, UserGroups, Blog] + collections = [Newuser, Newgroup, Blog, UserGroups] def setUp(self) -> None: - os.environ['USER_COLLECTION'] = 'Newuser' - os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test_user', email='tlasso@example.org', first_name='Ted', @@ -62,47 +67,47 @@ def test_read(self): self.assertTrue(res.status_code, 200) self.assertEqual("test_blog", res.json['data']['data']['title']) - def test_read_all(self): - res = self.c.get(f'/json/blog/list/', - headers={ - "Authorization": self.token}) - self.assertTrue(res.status_code, 200) - - def test_create(self): - self.assertNotIn("new blog", [ - blog.title for blog in Blog.all()]) - res = self.c.post('/json/blog/create/', - json={ - "title": "new blog", - "content": "I created a new blog.", - "group": self.group.ref.id()}, - headers={ - "Authorization": self.token}) - 
self.assertTrue(res.status_code, 200) - self.assertIn("new blog", [ - blog.title for blog in Blog.all()]) - - def test_update(self): - self.assertNotIn("the updated street somewhere", [ - house.address for house in Blog.all()]) - res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - json={ - "title": "updated blog", - "content": "I updated my blog."}, - headers={ - "Authorization": self.token}) - - self.assertTrue(res.status_code, 200) - self.assertIn("updated blog", [ - blog.title for blog in Blog.all()]) - - def test_delete(self): - res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) - - self.assertTrue(res.status_code, 200) - self.assertNotIn("test_blog", [ - blog.title for blog in Blog.all()]) + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "group": self.group.ref.id()}, + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + # self.assertIn("new blog", [ + # blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog."}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.status_code, 200) + # self.assertNotIn("test_blog", [ + # blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_sandbox.py b/pfunk/tests/test_sandbox.py new file mode 100644 index 0000000..91b9426 --- /dev/null +++ b/pfunk/tests/test_sandbox.py @@ -0,0 +1,38 @@ +import unittest +import os +from unittest import mock +from importlib import reload +import sys + +from pfunk.fields import ReferenceField, ManyToManyField + +env_vars = { + 'USER_COLLECTION': 'Newuser', + 'GROUP_COLLECTION': 'Newgroup', + 'USER_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newuser', + 'GROUP_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' +} + + +class TestReloadModule(unittest.TestCase): + + def test_reload_pfunk(self): + import pfunk + ug = pfunk.contrib.auth.collections.UserGroups + for k,v in ug._base_properties.items(): + print(f'K: {k}, V: {dir(v)}\n') + print(f'') + + # mock.patch.dict(os.environ, env_vars) + # pfunk = reload(pfunk) + # # del sys.modules['pfunk'] + # # for x in sys.modules: + # # if 'pfunk' in x: + # # del x + # # del pfunk + + + # ug = pfunk.contrib.auth.collections.UserGroups + # for k,v in ug._base_properties.items(): + # print(f'K: {k}, V: {v.get_graphql_type()}\n') + # print(f'') \ No newline at end of file diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py new file mode 100644 index 0000000..6458d5d --- /dev/null +++ b/pfunk/tests/unittest_keys.py @@ -0,0 +1,2 @@ + +KEYS = 
{'62be9435-dcfd-4b17-902c-c658ad3c1a3d': {'signature_key': 'EjrvrIfA48q1QzwORDFSOYr4pC_wmb4otQXVkrfC9N0=', 'payload_key': 't8D_JPHVZdxLv3gJJlXkV4qNe0Tu8k47rxzUPyEI6Wo=', 'kid': '62be9435-dcfd-4b17-902c-c658ad3c1a3d'}, 'cc89cd63-dbec-4583-b447-282089fb3226': {'signature_key': '3vDXNhE2ulCYC7sh1k9H33U61Eeze-WOsEH__mSOzU4=', 'payload_key': 'YukAkmvpjTwnSl5EI7jry9hcb74OTgp2vS9edYhDpzA=', 'kid': 'cc89cd63-dbec-4583-b447-282089fb3226'}, '490ada85-0ad2-43ad-a6ae-71a59b45e9b1': {'signature_key': '2x3_9dftvlYqEtx3A_RkCwhxlYumdbaUhYM_UJXOmUY=', 'payload_key': 'iy2QIeSdzXByke1pQiHIdLFcChxYJBpp2xt1Z1Y-QI4=', 'kid': '490ada85-0ad2-43ad-a6ae-71a59b45e9b1'}, 'f31a2030-53f4-497f-89a3-ae89cb7465d8': {'signature_key': 'ldUYldicrQ4vHsueYG76YTjsPheV3i8YtpnWQMt60ac=', 'payload_key': '6D-5qwF3qanco2DOoy89q5H741VslLZ6HsdFI7fLQ-s=', 'kid': 'f31a2030-53f4-497f-89a3-ae89cb7465d8'}, 'a2fdd59c-603e-4da1-991a-bdde170a5d6f': {'signature_key': 'LSP9hYwl9Ys5pCzJJ--sDTOyCZij16UPOH9Wf5jAQVo=', 'payload_key': '_dUk0D3p-Ygxyu6SgeUM-yfd-ed-Fyd-t6MikDx9FuQ=', 'kid': 'a2fdd59c-603e-4da1-991a-bdde170a5d6f'}, '3a8cd167-8ae1-4570-96cb-dc876dec4085': {'signature_key': 'EZ35xa4rlf26AKI0-rjhNmAfeqtlSzMR1TD3Q5-1LL8=', 'payload_key': 'm2rXuWNU5uaDmCsP9t0usj3QYMLluR11s_wQz3S34wg=', 'kid': '3a8cd167-8ae1-4570-96cb-dc876dec4085'}, '7beb1a45-2c7d-4d92-a348-538e106c774d': {'signature_key': '52RZafI4c3k-RGYpN_0tWIyrnNFESlxZJDezecGCm3A=', 'payload_key': 'VZynSo0lR2abNH-Dn1EJr_rgLBJCkZNMF0sxmFq6emM=', 'kid': '7beb1a45-2c7d-4d92-a348-538e106c774d'}, '5429e083-7951-40cc-8b52-474e8103df0c': {'signature_key': 'p2K6-AcfExgybkBX1SDeRDO7Z3EXXN7c78tSwiEWI2w=', 'payload_key': 'Mhy6lWX7Ax3nctQZGVDsI9HhjkbECD3bCTuQrpodqRQ=', 'kid': '5429e083-7951-40cc-8b52-474e8103df0c'}, '29e5666f-b5eb-4cad-8010-f097bd732fa6': {'signature_key': '1eb5kJZapr30y78HYciVDoKPWERVIjE9uFDui4UdJYQ=', 'payload_key': 'iOAss1h8Y0LtFSAAkFR5xSUiSRriZqAjExHQIX539AY=', 'kid': '29e5666f-b5eb-4cad-8010-f097bd732fa6'}, '0c0c822d-3784-4158-a101-fd3fee1d2844': {'signature_key': 'FM0YbWTsiay-tK0vuptbNJxkvzHojOQNa-NqDl0howk=', 'payload_key': 'mqBX0y5skZmBvL1GtZk28Y4161XxUNyE8FJ0Ejs0wuU=', 'kid': '0c0c822d-3784-4158-a101-fd3fee1d2844'}} \ No newline at end of file From 4b49a6e0ccb3a026187a71aea142f05f2ecf2b3b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 23 Jan 2023 15:25:44 +0800 Subject: [PATCH 113/214] Refactored tests to enable use of optional custom user and group in unittests --- pfunk/collection.py | 1 - pfunk/contrib/auth/collections.py | 1 + pfunk/project.py | 2 - .../test_custom_user_group_group_perms.py | 97 +++++++++---------- pfunk/tests/test_custom_user_group_m2m.py | 14 ++- .../test_custom_user_group_users_perms.py | 18 +++- pfunk/tests/unittest_keys.py | 2 - 7 files changed, 71 insertions(+), 64 deletions(-) delete mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/collection.py b/pfunk/collection.py index f728c01..500b573 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -154,7 +154,6 @@ def get_group_field(self) -> str: """ fields = self._base_properties.items() - # TODO: fix not being able to acquire self.group_collection properly and taking env default -> Group instead group_class = self.group_collection or env('GROUP_COLLECTION', 'Group') group_field = None for k, v in fields: diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 80c9c2a..b7c9f2c 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -423,6 +423,7 @@ class Group(BaseGroup): class 
User(ExtendedUser): """ A default user that already has predefined M2M relationship with `pfunk.contrib.auth.collections.Group` """ + user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') group_class = import_util('pfunk.contrib.auth.collections.Group') groups = ManyToManyField( 'pfunk.contrib.auth.collections.Group', 'users_groups') diff --git a/pfunk/project.py b/pfunk/project.py index b561059..655ed5b 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -193,8 +193,6 @@ def publish(self, mode: str = 'merge') -> int: """ gql_io = BytesIO(self.render().encode()) - print(f'\n\nPUBLISHING PROJ...\n\n') - print(f'\n\n{self.render()}\n\n') if self.client: secret = self.client.secret diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_custom_user_group_group_perms.py index 858bc9c..179a0e4 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ b/pfunk/tests/test_custom_user_group_group_perms.py @@ -1,14 +1,10 @@ from valley.utils import import_util -from pprint import pprint as p -from unittest import mock -from envs import env -from importlib import reload from pfunk.contrib.auth.key import PermissionGroup from pfunk.testcase import APITestCase from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk import Collection, StringField, ReferenceField, ManyToManyField +from pfunk.fields import ManyToManyField, StringField from pfunk.contrib.auth.resources import GenericGroupBasedRole @@ -38,6 +34,7 @@ def __unicode__(self): class Newuser(ExtendedUser): + group_collection = 'Newgroup' user_group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.UserGroups') group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( @@ -67,47 +64,47 @@ def test_read(self): self.assertTrue(res.status_code, 200) self.assertEqual("test_blog", res.json['data']['data']['title']) - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "group": self.group.ref.id()}, - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) - - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": "updated blog", - # "content": "I updated my blog."}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ - # blog.title for blog in Blog.all()]) - - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) - - # self.assertTrue(res.status_code, 200) - # self.assertNotIn("test_blog", [ - # blog.title for blog in Blog.all()]) + def test_read_all(self): + 
res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog.", + "group": self.group.ref.id()}, + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) + + def test_update(self): + self.assertNotIn("the updated street somewhere", [ + house.address for house in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog."}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_custom_user_group_m2m.py b/pfunk/tests/test_custom_user_group_m2m.py index a92663e..379f8bc 100644 --- a/pfunk/tests/test_custom_user_group_m2m.py +++ b/pfunk/tests/test_custom_user_group_m2m.py @@ -2,13 +2,17 @@ import os from valley.utils import import_util -from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug from pfunk.testcase import APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, GenericUserBasedRoleM2M +from pfunk import Collection, StringField, ReferenceField, ManyToManyField +from pfunk.fields import ManyToManyField, StringField +from pfunk.contrib.auth.resources import GenericUserBasedRoleM2M + + +class UserGroups(ug): + userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): diff --git a/pfunk/tests/test_custom_user_group_users_perms.py b/pfunk/tests/test_custom_user_group_users_perms.py index fb2c58c..ddc60b5 100644 --- a/pfunk/tests/test_custom_user_group_users_perms.py +++ b/pfunk/tests/test_custom_user_group_users_perms.py @@ -4,11 +4,16 @@ from valley.utils import import_util from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.contrib.auth.collections import BaseGroup , ExtendedUser, BaseUserGroup as ug from pfunk.testcase import APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole +from pfunk import Collection, StringField, ReferenceField, ManyToManyField +from pfunk.fields import ManyToManyField, StringField +from 
pfunk.contrib.auth.resources import GenericUserBasedRole + + +class UserGroups(ug): + userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): @@ -17,6 +22,7 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): + group_collection = 'Newgroup' user_group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.UserGroups') group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( @@ -26,6 +32,10 @@ class Newuser(ExtendedUser): class Blog(Collection): + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' collection_roles = [GenericUserBasedRole] title = StringField(required=True) content = StringField(required=True) diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py deleted file mode 100644 index 6458d5d..0000000 --- a/pfunk/tests/unittest_keys.py +++ /dev/null @@ -1,2 +0,0 @@ - -KEYS = {'62be9435-dcfd-4b17-902c-c658ad3c1a3d': {'signature_key': 'EjrvrIfA48q1QzwORDFSOYr4pC_wmb4otQXVkrfC9N0=', 'payload_key': 't8D_JPHVZdxLv3gJJlXkV4qNe0Tu8k47rxzUPyEI6Wo=', 'kid': '62be9435-dcfd-4b17-902c-c658ad3c1a3d'}, 'cc89cd63-dbec-4583-b447-282089fb3226': {'signature_key': '3vDXNhE2ulCYC7sh1k9H33U61Eeze-WOsEH__mSOzU4=', 'payload_key': 'YukAkmvpjTwnSl5EI7jry9hcb74OTgp2vS9edYhDpzA=', 'kid': 'cc89cd63-dbec-4583-b447-282089fb3226'}, '490ada85-0ad2-43ad-a6ae-71a59b45e9b1': {'signature_key': '2x3_9dftvlYqEtx3A_RkCwhxlYumdbaUhYM_UJXOmUY=', 'payload_key': 'iy2QIeSdzXByke1pQiHIdLFcChxYJBpp2xt1Z1Y-QI4=', 'kid': '490ada85-0ad2-43ad-a6ae-71a59b45e9b1'}, 'f31a2030-53f4-497f-89a3-ae89cb7465d8': {'signature_key': 'ldUYldicrQ4vHsueYG76YTjsPheV3i8YtpnWQMt60ac=', 'payload_key': '6D-5qwF3qanco2DOoy89q5H741VslLZ6HsdFI7fLQ-s=', 'kid': 'f31a2030-53f4-497f-89a3-ae89cb7465d8'}, 'a2fdd59c-603e-4da1-991a-bdde170a5d6f': {'signature_key': 'LSP9hYwl9Ys5pCzJJ--sDTOyCZij16UPOH9Wf5jAQVo=', 'payload_key': '_dUk0D3p-Ygxyu6SgeUM-yfd-ed-Fyd-t6MikDx9FuQ=', 'kid': 'a2fdd59c-603e-4da1-991a-bdde170a5d6f'}, '3a8cd167-8ae1-4570-96cb-dc876dec4085': {'signature_key': 'EZ35xa4rlf26AKI0-rjhNmAfeqtlSzMR1TD3Q5-1LL8=', 'payload_key': 'm2rXuWNU5uaDmCsP9t0usj3QYMLluR11s_wQz3S34wg=', 'kid': '3a8cd167-8ae1-4570-96cb-dc876dec4085'}, '7beb1a45-2c7d-4d92-a348-538e106c774d': {'signature_key': '52RZafI4c3k-RGYpN_0tWIyrnNFESlxZJDezecGCm3A=', 'payload_key': 'VZynSo0lR2abNH-Dn1EJr_rgLBJCkZNMF0sxmFq6emM=', 'kid': '7beb1a45-2c7d-4d92-a348-538e106c774d'}, '5429e083-7951-40cc-8b52-474e8103df0c': {'signature_key': 'p2K6-AcfExgybkBX1SDeRDO7Z3EXXN7c78tSwiEWI2w=', 'payload_key': 'Mhy6lWX7Ax3nctQZGVDsI9HhjkbECD3bCTuQrpodqRQ=', 'kid': '5429e083-7951-40cc-8b52-474e8103df0c'}, '29e5666f-b5eb-4cad-8010-f097bd732fa6': {'signature_key': '1eb5kJZapr30y78HYciVDoKPWERVIjE9uFDui4UdJYQ=', 'payload_key': 'iOAss1h8Y0LtFSAAkFR5xSUiSRriZqAjExHQIX539AY=', 'kid': '29e5666f-b5eb-4cad-8010-f097bd732fa6'}, '0c0c822d-3784-4158-a101-fd3fee1d2844': {'signature_key': 'FM0YbWTsiay-tK0vuptbNJxkvzHojOQNa-NqDl0howk=', 'payload_key': 'mqBX0y5skZmBvL1GtZk28Y4161XxUNyE8FJ0Ejs0wuU=', 'kid': '0c0c822d-3784-4158-a101-fd3fee1d2844'}} \ No newline at end of file From 43d57a41eb3a7fe2c2078e710681cea7de2c1c6c Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 24 Jan 2023 14:58:13 +0800 Subject: [PATCH 
114/214] Updated faunadb module. Removed unnecessary tests. --- pfunk/tests/test_sandbox.py | 38 ------------------------------------- poetry.lock | 16 ++++++++-------- 2 files changed, 8 insertions(+), 46 deletions(-) delete mode 100644 pfunk/tests/test_sandbox.py diff --git a/pfunk/tests/test_sandbox.py b/pfunk/tests/test_sandbox.py deleted file mode 100644 index 91b9426..0000000 --- a/pfunk/tests/test_sandbox.py +++ /dev/null @@ -1,38 +0,0 @@ -import unittest -import os -from unittest import mock -from importlib import reload -import sys - -from pfunk.fields import ReferenceField, ManyToManyField - -env_vars = { - 'USER_COLLECTION': 'Newuser', - 'GROUP_COLLECTION': 'Newgroup', - 'USER_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newuser', - 'GROUP_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' -} - - -class TestReloadModule(unittest.TestCase): - - def test_reload_pfunk(self): - import pfunk - ug = pfunk.contrib.auth.collections.UserGroups - for k,v in ug._base_properties.items(): - print(f'K: {k}, V: {dir(v)}\n') - print(f'') - - # mock.patch.dict(os.environ, env_vars) - # pfunk = reload(pfunk) - # # del sys.modules['pfunk'] - # # for x in sys.modules: - # # if 'pfunk' in x: - # # del x - # # del pfunk - - - # ug = pfunk.contrib.auth.collections.UserGroups - # for k,v in ug._base_properties.items(): - # print(f'K: {k}, V: {v.get_graphql_type()}\n') - # print(f'') \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 333249c..62c20aa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -272,7 +272,7 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -cli = ["terminaltables[cli] (>=3.1.10,<4.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "click[cli] (>=8.0.3,<9.0.0)"] +cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] [[package]] name = "executing" @@ -295,7 +295,7 @@ devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benc [[package]] name = "faunadb" -version = "4.2.0" +version = "4.3.1" description = "FaunaDB Python driver" category = "main" optional = false @@ -684,8 +684,8 @@ nest-asyncio = "*" traitlets = ">=5.0.0" [package.extras] -test = ["xmltodict", "twine (>=1.11.0)", "testpath", "setuptools (>=60.0)", "pytest-cov (>=2.6.1)", "pytest-asyncio", "pytest (>=4.1)", "pre-commit", "pip (>=18.1)", "mypy", "ipywidgets (<8.0.0)", "ipython (<8.0.0)", "ipykernel", "flake8", "check-manifest", "black"] -sphinx = ["sphinx-book-theme", "Sphinx (>=1.7)", "myst-parser", "moto", "mock", "autodoc-traits"] +sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] +test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] [[package]] name = "nbconvert" @@ -1048,7 +1048,7 @@ python-versions = ">=3.7" packaging = "*" [package.extras] -test = ["pytest-qt", "pytest-cov (>=3.0.0)", "pytest (>=6,!=7.0.0,!=7.0.1)"] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "requests" @@ -1138,7 +1138,7 @@ executing = "*" pure-eval = "*" [package.extras] -tests = ["cython", "littleutils", "pygments", "typeguard", "pytest"] +tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] [[package]] name = "stripe" @@ -1191,8 +1191,8 @@ python-versions = ">=3.6" 
webencodings = ">=0.4" [package.extras] -test = ["coverage", "pytest-isort", "pytest-flake8", "pytest-cov", "pytest"] -doc = ["sphinx-rtd-theme", "sphinx"] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] [[package]] name = "tornado" From e2d892118b4917235d109a9e39ba0896e748de99 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 26 Jan 2023 16:27:42 +0800 Subject: [PATCH 115/214] Refactored m2m custom user and group tests. Refactored names of custom user and group tests --- pfunk/contrib/auth/collections.py | 2 +- ...test_web_custom_user_group_group_perms.py} | 18 ++++++------- ...m.py => test_web_custom_user_group_m2m.py} | 25 ++++++++++--------- ...test_web_custom_user_group_users_perms.py} | 24 ++++++++---------- 4 files changed, 33 insertions(+), 36 deletions(-) rename pfunk/tests/{test_custom_user_group_group_perms.py => test_web_custom_user_group_group_perms.py} (82%) rename pfunk/tests/{test_custom_user_group_m2m.py => test_web_custom_user_group_m2m.py} (80%) rename pfunk/tests/{test_custom_user_group_users_perms.py => test_web_custom_user_group_users_perms.py} (76%) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index b7c9f2c..fec6f3a 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -331,7 +331,7 @@ def get_permissions(cls, ref, _token=None): def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ - if not self.group_class: + if not self.group_class or not self.user_group_class: raise NotImplementedError group_class_field = self.get_group_field() user_class = self.__class__.__name__.lower() diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_web_custom_user_group_group_perms.py similarity index 82% rename from pfunk/tests/test_custom_user_group_group_perms.py rename to pfunk/tests/test_web_custom_user_group_group_perms.py index 179a0e4..38274e5 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ b/pfunk/tests/test_web_custom_user_group_group_perms.py @@ -9,24 +9,24 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup') + userID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_custom_user_group_group_perms.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_group_perms.Newuser', relation_name='custom_users_groups') class Blog(Collection): user_collection = 'Newuser' group_collection = 'Newgroup' - user_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' - group_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' + user_collection_dir = 'pfunk.tests.test_web_custom_user_group_group_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup' collection_roles = [GenericGroupBasedRole] title = StringField(required=True) content = StringField(required=True) - group = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup', + group = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup', relation_name='newgroup_blogs') def __unicode__(self): @@ -35,10 +35,10 @@ def 
__unicode__(self): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.UserGroups') - group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') + user_group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.UserGroups') + group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') + 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') class TestCustomGroupBasedPerms(APITestCase): diff --git a/pfunk/tests/test_custom_user_group_m2m.py b/pfunk/tests/test_web_custom_user_group_m2m.py similarity index 80% rename from pfunk/tests/test_custom_user_group_m2m.py rename to pfunk/tests/test_web_custom_user_group_m2m.py index 379f8bc..3f86e4a 100644 --- a/pfunk/tests/test_custom_user_group_m2m.py +++ b/pfunk/tests/test_web_custom_user_group_m2m.py @@ -11,29 +11,34 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') + userID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newuser') + groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_custom_user_group_m2m.UserGroups') - group_class = import_util('pfunk.tests.test_custom_user_group_m2m.Newgroup') + group_collection = 'Newgroup' + user_group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.UserGroups') + group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Blog', + 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Blog', relation_name='users_blogs') class Blog(Collection): + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'pfunk.tests.test_web_custom_user_group_m2m.Newuser' + group_collection_dir = 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup' collection_roles = [GenericUserBasedRoleM2M] title = StringField(required=True) content = StringField(required=True) - users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -45,10 +50,6 @@ class TestCustomUserM2M(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: - os.environ['USER_COLLECTION'] = 'Newuser' - os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = 
Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', diff --git a/pfunk/tests/test_custom_user_group_users_perms.py b/pfunk/tests/test_web_custom_user_group_users_perms.py similarity index 76% rename from pfunk/tests/test_custom_user_group_users_perms.py rename to pfunk/tests/test_web_custom_user_group_users_perms.py index ddc60b5..a1b7b0c 100644 --- a/pfunk/tests/test_custom_user_group_users_perms.py +++ b/pfunk/tests/test_web_custom_user_group_users_perms.py @@ -12,34 +12,34 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') + userID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.UserGroups') - group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.Newgroup') + user_group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.UserGroups') + group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Blog', + 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Blog', relation_name='users_blogs') class Blog(Collection): user_collection = 'Newuser' group_collection = 'Newgroup' - user_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' - group_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' + user_collection_dir = 'pfunk.tests.test_web_custom_user_group_users_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup' collection_roles = [GenericUserBasedRole] title = StringField(required=True) content = StringField(required=True) - user = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser', + user = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -51,10 +51,6 @@ class TestCustomUserBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: - os.environ['USER_COLLECTION'] = 'Newuser' - os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', From 3b6dcfcfd7cbd46dcfbd6c5a64fd08790f77fd74 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 1 Mar 2023 21:46:29 +0800 Subject: [PATCH 116/214] updated current branch to reflect latest changes in swagger feature. 
updated poetry dependencies to be much cleaner --- pfunk/tests/test_project.py | 8 +- pfunk/utils/swagger.py | 4 + pfunk/web/views/json.py | 1 + poetry.lock | 815 ++++++++++++++++++++++++++---------- pyproject.toml | 6 +- 5 files changed, 612 insertions(+), 222 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 2665808..f7e97e0 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -29,10 +29,10 @@ def setUp(self) -> None: def tearDown(self) -> None: os.remove("pfunk.json") - try: - os.remove('swagger.yaml') - except FileNotFoundError: - pass + # try: + # os.remove('swagger.yaml') + # except FileNotFoundError: + # pass def test_add_resource(self): self.project.add_resource(Person) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2820efe..61f2aee 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -4,6 +4,7 @@ import swaggyp as sw from werkzeug.routing import Map, parse_rule +from pfunk.web.views.html import HTMLView from pfunk.collection import Collection GRAPHQL_TO_YAML_TYPES = { @@ -163,6 +164,9 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + # We skip HTML Views + if issubclass(view, HTMLView): + continue route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index b0e6b4a..5063000 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -67,6 +67,7 @@ def _payload_docs(self): ``` """ return {} + def get_req_with_m2m(self, data): """ Returns request with updated params that has the proper m2m entities """ fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') diff --git a/poetry.lock b/poetry.lock index 62c20aa..4c4aafa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,20 @@ +[[package]] +name = "anyio" +version = "3.6.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16,<0.22)"] + [[package]] name = "appnope" version = "0.1.3" @@ -37,9 +54,20 @@ cffi = ">=1.0.1" dev = ["pytest", "cogapp", "pre-commit", "wheel"] tests = ["pytest"] +[[package]] +name = "arrow" +version = "1.2.3" +description = "Better dates & times for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +python-dateutil = ">=2.7.0" + [[package]] name = "asttokens" -version = "2.0.5" +version = "2.2.1" description = "Annotate AST trees with source code positions" category = "dev" optional = false @@ -64,17 +92,19 @@ six = ">=1.6.1,<2.0" [[package]] name = "attrs" -version = "21.4.0" +version = "22.2.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", 
"zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs"] +docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["attrs", "zope.interface"] +tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] +tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] [[package]] name = "backcall" @@ -86,7 +116,7 @@ python-versions = "*" [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.11.2" description = "Screen-scraping library" category = "dev" optional = false @@ -114,27 +144,27 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.23.8" +version = "1.26.81" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.26.8,<1.27.0" +botocore = ">=1.29.81,<1.30.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.5.0,<0.6.0" +s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.26.8" +version = "1.29.81" description = "Low-level, data-driven core of boto 3." category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -142,7 +172,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.8)"] +crt = ["awscrt (==0.16.9)"] [[package]] name = "cachetools" @@ -154,7 +184,7 @@ python-versions = "~=3.5" [[package]] name = "certifi" -version = "2022.5.18.1" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -162,7 +192,7 @@ python-versions = ">=3.6" [[package]] name = "cffi" -version = "1.15.0" +version = "1.15.1" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -173,14 +203,11 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "3.0.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" - -[package.extras] -unicode_backport = ["unicodedata2"] +python-versions = "*" [[package]] name = "click" @@ -195,11 +222,25 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "comm" +version = "0.1.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +traitlets = ">=5.3" + +[package.extras] +test = ["pytest"] [[package]] name = "coverage" @@ -233,7 +274,7 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "debugpy" -version = "1.6.0" +version = "1.6.6" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false @@ -255,14 +296,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "envs" version = "1.4" @@ -276,15 +309,18 @@ cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminalt [[package]] name = "executing" -version = "0.8.3" +version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" category = "dev" optional = false python-versions = "*" +[package.extras] +tests = ["asttokens", "pytest", "littleutils", "rich"] + [[package]] name = "fastjsonschema" -version = "2.15.3" +version = "2.16.3" description = "Fastest Python implementation of JSON schema" category = "dev" optional = false @@ -295,7 +331,7 @@ devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benc [[package]] name = "faunadb" -version = "4.3.1" +version = "4.5.0" description = "FaunaDB Python driver" category = "main" optional = false @@ -303,7 +339,7 @@ python-versions = "*" [package.dependencies] future = "*" -hyper = "*" +httpx = {version = "*", extras = ["http2"]} iso8601 = "*" requests = "*" @@ -323,9 +359,17 @@ python-versions = ">=3.8,<4.0" Jinja2 = ">=3.1.2,<4.0.0" valley = ">=1.5.8,<2.0.0" +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" + [[package]] name = "future" -version = "0.18.2" +version = "0.18.3" description = "Clean single-source support for Python 3 and 2" category = "main" optional = false @@ -342,62 +386,110 @@ python-versions = "*" [package.dependencies] ply = ">=3.6" +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "h2" -version = "2.6.2" +version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" [package.dependencies] -hpack = ">=2.2,<4" -hyperframe = ">=3.1,<4.0.0 || >4.0.0,<6" +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" [[package]] name = "hpack" -version = "3.0.0" +version = "4.0.0" description = "Pure-Python HPACK header compression" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" [[package]] -name = "hyper" -version = "0.7.0" -description = "HTTP/2 Client for Python" +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" [package.dependencies] -h2 = ">=2.4,<3.0" -hyperframe = ">=3.2,<4.0" +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" [package.extras] -fast = ["pycohttpparser"] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "hyperframe" -version = "3.2.0" +version = "6.0.1" description = "HTTP/2 framing layer for Python" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "importlib-metadata" +version = "6.0.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"] + [[package]] name = "importlib-resources" -version = "5.7.1" +version = "5.12.0" description = "Read resources from Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -405,35 +497,42 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [[package]] name = "ipykernel" -version = "6.13.0" +version = "6.21.2" description = "IPython Kernel for Jupyter" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} -debugpy = ">=1.0" +comm = ">=0.1.1" +debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" +pyzmq = ">=20" tornado = ">=6.1" -traitlets = ">=5.1.0" +traitlets = ">=5.4.0" [package.extras] -test = ["pytest (>=6.0)", "pytest-cov", "flaky", "ipyparallel", 
"pre-commit", "pytest-timeout"] +cov = ["coverage", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "ipython" -version = "8.3.0" +version = "8.11.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -448,15 +547,15 @@ jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "Sphinx (>=1.3)", "ipykernel", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.19)", "pandas", "trio"] +all = ["black", "ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.21)", "pandas", "trio"] black = ["black"] -doc = ["Sphinx (>=1.3)"] +doc = ["ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -464,7 +563,7 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "trio"] +test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "trio"] [[package]] name = "ipython-genutils" @@ -476,35 +575,44 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "7.7.0" -description = "IPython HTML widgets for Jupyter" +version = "8.0.4" +description = "Jupyter interactive widgets" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] ipykernel = ">=4.5.1" -ipython = {version = ">=4.0.0", markers = "python_version >= \"3.3\""} -ipython-genutils = ">=0.2.0,<0.3.0" -jupyterlab-widgets = {version = ">=1.0.0", markers = "python_version >= \"3.6\""} -nbformat = ">=4.2.0" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0,<4.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=3.6.0,<3.7.0" +widgetsnbextension = ">=4.0,<5.0" [package.extras] -test = ["pytest (>=3.6.0)", "pytest-cov", "mock"] +test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] [[package]] name = "iso8601" -version = "1.0.2" +version = "1.1.0" description = "Simple module to parse ISO 8601 dates" category = "main" optional = false python-versions = ">=3.6.2,<4.0" +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +arrow = 
">=0.15.0" + [[package]] name = "jedi" -version = "0.18.1" +version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." category = "dev" optional = false @@ -514,8 +622,9 @@ python-versions = ">=3.6" parso = ">=0.8.0,<0.9.0" [package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" @@ -533,28 +642,45 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jmespath" -version = "1.0.0" +version = "1.0.1" description = "JSON Matching Expressions" category = "main" optional = false python-versions = ">=3.7" +[[package]] +name = "jsonpointer" +version = "2.3" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "jsonschema" -version = "4.5.1" +version = "4.17.3" description = "An implementation of JSON Schema validation for Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" [package.dependencies] attrs = ">=17.4.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] name = "jupyter" @@ -574,58 +700,131 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "7.3.1" +version = "8.0.3" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = 
">=3.8" [package.dependencies] -entrypoints = "*" -jupyter-core = ">=4.9.2" -nest-asyncio = ">=1.5.4" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" -pyzmq = ">=22.3" -tornado = ">=6.0" -traitlets = "*" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" [package.extras] -doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -test = ["codecov", "coverage", "ipykernel (>=6.5)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinx (>=4)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-console" -version = "6.4.3" +version = "6.6.2" description = "Jupyter terminal console" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -ipykernel = "*" +ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +prompt-toolkit = ">=3.0.30" pygments = "*" +pyzmq = ">=17" +traitlets = ">=5.4" [package.extras] -test = ["pexpect"] +test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "4.10.0" +version = "5.2.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] +platformdirs = ">=2.5" pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = "*" +traitlets = ">=5.3" [package.extras] +docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "jupyter-events" +version = "0.6.3" +description = "Jupyter Event System library" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jsonschema = {version = ">=3.2.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "coverage", "pre-commit", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "pytest (>=7.0)", "rich"] + +[[package]] +name = "jupyter-server" +version = "2.3.0" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-events = ">=0.4.0" +jupyter-server-terminals = "*" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = "*" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = "*" + +[package.extras] +docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["ipykernel", "pre-commit", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "pytest (>=7.0)", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.4.4" +description = "A Jupyter Server Extension Providing Terminals." +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout", "pytest (>=7.0)"] + [[package]] name = "jupyterlab-pygments" version = "0.2.2" @@ -636,15 +835,15 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "1.1.0" -description = "A JupyterLab extension." +version = "3.0.5" +description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false @@ -652,7 +851,7 @@ python-versions = ">=3.7" [[package]] name = "matplotlib-inline" -version = "0.1.3" +version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" category = "dev" optional = false @@ -663,33 +862,66 @@ traitlets = "*" [[package]] name = "mistune" -version = "0.8.4" -description = "The fastest markdown parser in pure Python" +version = "2.0.5" +description = "A sane Markdown parser with useful plugins and renderers" category = "dev" optional = false python-versions = "*" +[[package]] +name = "nbclassic" +version = "0.5.2" +description = "Jupyter Notebook as a Jupyter Server extension." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=6.1.1" +jupyter-core = ">=4.6.1" +jupyter-server = ">=1.8" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +notebook-shim = ">=0.1.0" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +json-logging = ["json-logging"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "pytest-playwright", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "requests-unixsocket"] + [[package]] name = "nbclient" -version = "0.6.3" +version = "0.7.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "dev" optional = false python-versions = ">=3.7.0" [package.dependencies] -jupyter-client = ">=6.1.5" -nbformat = ">=5.0" -nest-asyncio = "*" -traitlets = ">=5.0.0" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +nbformat = ">=5.1" +traitlets = ">=5.3" [package.extras] -sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] -test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient", "sphinx-book-theme", "sphinx (>=1.7)"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "pytest (>=7.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" -version = "6.5.0" +version = "7.2.9" description = "Converting Jupyter Notebooks" category = "dev" optional = false @@ -699,12 +931,12 @@ python-versions = ">=3.7" beautifulsoup4 = "*" bleach = "*" defusedxml = "*" -entrypoints = ">=0.2.2" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" -MarkupSafe = ">=2.0" -mistune = ">=0.8.1,<2" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<3" nbclient = ">=0.5.0" nbformat = ">=5.1" packaging = "*" @@ -714,15 +946,17 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "tornado (>=6.1)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] -docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["nbconvert"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["nbconvert"] +qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.4.0" +version = "5.7.3" description = "The Jupyter Notebook format" category = "dev" optional = false @@ -735,11 +969,12 @@ jupyter-core = "*" traitlets = ">=5.1" [package.extras] 
-test = ["check-manifest", "testpath", "pytest", "pre-commit"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.5" +version = "1.5.6" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false @@ -747,7 +982,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.4.11" +version = "6.5.2" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -760,6 +995,7 @@ ipython-genutils = "*" jinja2 = "*" jupyter-client = ">=5.3.4" jupyter-core = ">=4.6.1" +nbclassic = ">=0.4.7" nbconvert = ">=5" nbformat = "*" nest-asyncio = ">=1.5" @@ -773,18 +1009,61 @@ traitlets = ">=4.2.1" [package.extras] docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "requests-unixsocket"] [[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" +name = "notebook-shim" +version = "0.2.2" +description = "A shim layer for notebook traits and config" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] + +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7.0,<4.0.0" [package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + +[[package]] +name = "packaging" +version = "23.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" [[package]] name = "pandocfilters" @@ -842,6 +1121,26 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "platformdirs" +version = "3.0.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.1)"] + [[package]] name = "ply" version = "3.11" @@ -852,7 +1151,7 @@ python-versions = "*" [[package]] name = "prometheus-client" -version = "0.14.1" +version = "0.16.0" description = "Python client for the Prometheus monitoring system." category = "dev" optional = false @@ -863,18 +1162,18 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.29" +version = "3.0.38" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" [package.dependencies] wcwidth = "*" [[package]] name = "psutil" -version = "5.9.1" +version = "5.9.4" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false @@ -902,14 +1201,6 @@ python-versions = "*" [package.extras] tests = ["pytest"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "pycparser" version = "2.21" @@ -920,42 +1211,34 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.12.0" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyjwt" -version = "2.4.0" +version = "2.6.0" description = "JSON Web Token implementation in Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -crypto = ["cryptography (>=3.3.1)"] -dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +crypto = ["cryptography (>=3.4.0)"] +dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["railroad-diagrams", "jinja2"] - [[package]] name = "pyrsistent" -version = "0.18.1" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -970,6 +1253,14 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" [package.dependencies] six = ">=1.5" +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "pytz" version = "2021.3" @@ -980,7 +1271,7 @@ python-versions = "*" [[package]] name = "pywin32" 
-version = "304" +version = "305" description = "Python for Window Extensions" category = "dev" optional = false @@ -988,7 +1279,7 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "2.0.5" +version = "2.0.10" description = "Pseudo terminal support for Windows from Python." category = "dev" optional = false @@ -1004,7 +1295,7 @@ python-versions = ">=3.6" [[package]] name = "pyzmq" -version = "23.0.0" +version = "25.0.0" description = "Python bindings for 0MQ" category = "dev" optional = false @@ -1012,11 +1303,10 @@ python-versions = ">=3.6" [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} -py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.3.0" +version = "5.4.0" description = "Jupyter Qt console" category = "dev" optional = false @@ -1030,7 +1320,7 @@ jupyter-core = "*" pygments = "*" pyzmq = ">=17.1" qtpy = ">=2.0.1" -traitlets = "*" +traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" [package.extras] doc = ["Sphinx (>=1.3)"] @@ -1038,7 +1328,7 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "2.1.0" +version = "2.3.0" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" optional = false @@ -1052,29 +1342,62 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "requests" -version = "2.27.1" +version = "2.28.2" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "s3transfer" -version = "0.5.2" +version = "0.6.0" description = "An Amazon S3 Transfer Manager" category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -1116,25 +1439,33 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "soupsieve" -version = 
"2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "stack-data" -version = "0.2.0" +version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" category = "dev" optional = false python-versions = "*" [package.dependencies] -asttokens = "*" -executing = "*" +asttokens = ">=2.1.0" +executing = ">=1.2.0" pure-eval = "*" [package.extras] @@ -1153,7 +1484,7 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "swaggyp" -version = "0.2.0" +version = "0.3.0" description = "Python library for generating Swagger templates based on valley" category = "main" optional = false @@ -1165,7 +1496,7 @@ valley = ">=1.5.6,<2.0.0" [[package]] name = "terminado" -version = "0.15.0" +version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." category = "dev" optional = false @@ -1177,53 +1508,66 @@ pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} tornado = ">=6.1.0" [package.extras] -test = ["pre-commit", "pytest-timeout", "pytest (>=6.0)"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "tinycss2" -version = "1.1.1" +version = "1.2.1" description = "A tiny CSS parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] webencodings = ">=0.4" [package.extras] doc = ["sphinx", "sphinx-rtd-theme"] -test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] +test = ["pytest", "isort", "flake8"] [[package]] name = "tornado" -version = "6.1" +version = "6.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." category = "dev" optional = false -python-versions = ">= 3.5" +python-versions = ">= 3.7" [[package]] name = "traitlets" -version = "5.2.1.post0" -description = "" +version = "5.9.0" +description = "Traitlets Python configuration system" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -test = ["pre-commit", "pytest"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] + +[[package]] +name = "uri-template" +version = "1.2.0" +description = "RFC 6570 URI Template Processor" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["mypy", "flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "pep8-naming"] [[package]] name = "urllib3" -version = "1.26.9" +version = "1.26.14" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1239,12 +1583,20 @@ envs = ">=1.3,<2.0" [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" category = "dev" optional = false python-versions = "*" +[[package]] +name = "webcolors" +version = "1.12" +description = "A library for working with color names and color values formats defined by HTML and CSS." +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "webencodings" version = "0.5.1" @@ -1253,6 +1605,19 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "websocket-client" +version = "1.5.1" +description = "WebSocket client for Python with low level API options" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "werkzeug" version = "2.1.2" @@ -1266,36 +1631,35 @@ watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "3.6.0" -description = "IPython HTML widgets for Jupyter" +version = "4.0.5" +description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -notebook = ">=4.4.1" +python-versions = ">=3.7" [[package]] name = "zipp" -version = "3.8.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "324e8aabc4bf0157d9126fe65c85ec227eba1c16ced513b206ca6f02129494d9" +content-hash = "1eed3385a9e7bfaf61e1c764485f5ebfff46418df4af9b6ee8dd0bb12b429c37" [metadata.files] +anyio = [] appnope = [] argon2-cffi = [] argon2-cffi-bindings = [] +arrow = [] asttokens = [] astunparse = [] attrs = [] @@ -1310,67 +1674,81 @@ cffi = [] charset-normalizer = [] click = [] colorama = [] +comm = [] coverage = [] cryptography = [] debugpy = [] decorator = [] defusedxml = [] -entrypoints = [] envs = [] executing = [] fastjsonschema = [] faunadb = [] formy = [] +fqdn = [] future = [] graphql-py = [] +h11 = [] h2 = [] hpack = [] -hyper = [] +httpcore = [] +httpx = [] hyperframe = [] idna = [] 
+importlib-metadata = [] importlib-resources = [] ipykernel = [] ipython = [] ipython-genutils = [] ipywidgets = [] iso8601 = [] +isoduration = [] jedi = [] jinja2 = [] jmespath = [] +jsonpointer = [] jsonschema = [] jupyter = [] jupyter-client = [] jupyter-console = [] jupyter-core = [] +jupyter-events = [] +jupyter-server = [] +jupyter-server-terminals = [] jupyterlab-pygments = [] jupyterlab-widgets = [] markupsafe = [] matplotlib-inline = [] mistune = [] +nbclassic = [] nbclient = [] nbconvert = [] nbformat = [] nest-asyncio = [] notebook = [] +notebook-shim = [] +openapi-schema-validator = [] +openapi-spec-validator = [] packaging = [] pandocfilters = [] parso = [] pdoc = [] pexpect = [] pickleshare = [] +pkgutil-resolve-name = [] +platformdirs = [] ply = [] prometheus-client = [] prompt-toolkit = [] psutil = [] ptyprocess = [] pure-eval = [] -py = [] pycparser = [] pygments = [] pyjwt = [] -pyparsing = [] pyrsistent = [] python-dateutil = [] +python-json-logger = [] pytz = [] pywin32 = [] pywinpty = [] @@ -1379,10 +1757,14 @@ pyzmq = [] qtconsole = [] qtpy = [] requests = [] +rfc3339-validator = [] +rfc3986 = [] +rfc3986-validator = [] s3transfer = [] sammy = [] send2trash = [] six = [] +sniffio = [] soupsieve = [] stack-data = [] stripe = [] @@ -1391,10 +1773,13 @@ terminado = [] tinycss2 = [] tornado = [] traitlets = [] +uri-template = [] urllib3 = [] valley = [] wcwidth = [] +webcolors = [] webencodings = [] +websocket-client = [] werkzeug = [] widgetsnbextension = [] zipp = [] diff --git a/pyproject.toml b/pyproject.toml index 465e10e..2e3bdbc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,9 +13,8 @@ envs = "^1.3" requests = "^2.23.0" pytz = "^2021.1" decorator = "^5.0.9" -Werkzeug = "^2.0.1" pyjwt = "^2.1.0" -pip = "^21.2.4" +pip = "^23.0.1" cryptography = "^3.4.7" cachetools = "^4.2.2" click = "^8.0.1" @@ -28,6 +27,7 @@ openapi-spec-validator = "^0.4.0" swaggyp = "^0.3.0" formy = "1.3.1" Jinja2 = "^3.1.2" +Werkzeug = "2.1.2" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" @@ -35,7 +35,7 @@ coverage = "^5.5" pdoc = "^7.2.0" [build-system] -requires = ["poetry>=0.12"] +requires = ["setuptools", "poetry>=0.12"] [tool.poetry.scripts] pfunk = 'pfunk.cli:pfunk' From 9f782092eefdd29d06060c3ea714516439f8316e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 6 Mar 2023 14:17:32 +0800 Subject: [PATCH 117/214] added documentation for auth json views --- pfunk/contrib/auth/views.py | 86 ++++++++++++++++++++++++++++++++++++- pfunk/utils/swagger.py | 3 +- 2 files changed, 87 insertions(+), 2 deletions(-) diff --git a/pfunk/contrib/auth/views.py b/pfunk/contrib/auth/views.py index 01b6b86..495443b 100644 --- a/pfunk/contrib/auth/views.py +++ b/pfunk/contrib/auth/views.py @@ -38,6 +38,25 @@ def get_query(self): 'exp': exp } + def _payload_docs(self): + return {"data": [ + { + "name": "username", + "in": "formData", + "description": "Username of the user", + "required": True, + "type": "string" + }, + { + "name": "password", + "in": "formData", + "description": "Password of the user", + "required": True, + "type":"string", + "format": "password" + } + ]} + class LogoutView(ActionMixin, JSONAuthView): """ Creates a logout view to enable logout via endpoint @@ -64,6 +83,24 @@ class SignUpView(ActionMixin, JSONAuthView): def get_query(self): return self.collection.signup(**self.get_query_kwargs()) + def _payload_docs(self): + return {"data": [ + { + "name": "username", + "in": "formData", + "description": "username of the user", + "required": True, + "type": "string" + 
}, + { + "name": "password", + "in": "formData", + "description": "password of the user", + "required": True, + "type":"string", + "format": "password" + } + ]} class VerifyEmailView(ActionMixin, JSONAuthView): """ Creates a view that enables verification of a user @@ -97,6 +134,33 @@ def get_query(self): self.collection.update_password(kwargs['current_password'], kwargs['new_password'], kwargs['new_password_confirm'], _token=self.request.token) + def _payload_docs(self): + return {"data": [ + { + "name": "current_password", + "in": "formData", + "description": "current password of the user", + "required": True, + "type": "string", + "format": "password" + }, + { + "name": "new_password", + "in": "formData", + "description": "new password of the user", + "required": True, + "type":"string", + "format": "password" + }, + { + "name": "new_password_confirm", + "in": "formData", + "description": "confirm the new password of the user by entering the same string", + "required": True, + "type":"string", + "format": "password" + } + ]} class ForgotPasswordView(ActionMixin, JSONAuthView): """ Create a view to allow call of forgot password func """ @@ -106,6 +170,17 @@ class ForgotPasswordView(ActionMixin, JSONAuthView): def get_query(self): return self.collection.forgot_password(**self.get_query_kwargs()) + + def _payload_docs(self): + return {"data": [ + { + "name": "email", + "in": "formData", + "description": "email of the user", + "required": True, + "type": "string" + } + ]} class ForgotPasswordChangeView(ActionMixin, JSONAuthView): @@ -124,7 +199,16 @@ def get_query(self): verify_type='forgot', password=kwargs['password']) - + def _payload_docs(self): + return {"data": [ + { + "name": "verification_key", + "in": "formData", + "description": "hashed key for verification of forgot password event", + "required": True, + "type": "string" + } + ]} class WebhookView(JSONView): pass diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 61f2aee..a4ca383 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -237,7 +237,8 @@ def get_operations(self, col: Collection): _in=field.get('in'), description=field.get('description'), required=field.get('required'), - allowEmptyValue=False + allowEmptyValue=False, + _format=field.get('format') ) params.append(param) From d410aac7382ffb0c54fa1f8a952036f5efb8872d Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 13 Mar 2023 16:28:19 +0800 Subject: [PATCH 118/214] Fixed creation of jwt to cast to int the payload for iat. 
Updated packages --- pfunk/contrib/auth/key.py | 4 +- poetry.lock | 1300 +++++++++++++++++++++++++++++++------ pyproject.toml | 2 +- 3 files changed, 1093 insertions(+), 213 deletions(-) diff --git a/pfunk/contrib/auth/key.py b/pfunk/contrib/auth/key.py index 394c5f4..3f2faad 100644 --- a/pfunk/contrib/auth/key.py +++ b/pfunk/contrib/auth/key.py @@ -55,7 +55,7 @@ def create_jwt(cls, secret_claims): now = datetime.datetime.now(tz=gmt) exp = now + datetime.timedelta(days=1) payload = { - 'iat': now.timestamp(), + 'iat': int(now.timestamp()), 'exp': exp.timestamp(), 'nbf': now.timestamp(), 'iss': env('PROJECT_NAME', 'pfunk'), @@ -69,7 +69,7 @@ def decrypt_jwt(cls, encoded): keys = cls.import_keys() key = keys.get(headers.get('kid')) try: - decoded = jwt.decode(encoded, key.get('signature_key'), algorithms="HS256", verify=True, + decoded = jwt.decode(encoded, key.get('signature_key'), algorithms=["HS256"], verify=True, options={"require": ["iat", "exp", "nbf", 'iss', 'til']}) except ExpiredSignatureError: raise Unauthorized('Unauthorized') diff --git a/poetry.lock b/poetry.lock index 4c4aafa..965a56f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,8 +11,8 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] trio = ["trio (>=0.16,<0.22)"] [[package]] @@ -35,8 +35,8 @@ python-versions = ">=3.6" argon2-cffi-bindings = "*" [package.extras] -dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"] -docs = ["sphinx", "sphinx-notfound-page", "furo"] +dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] +docs = ["furo", "sphinx", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] @@ -51,7 +51,7 @@ python-versions = ">=3.6" cffi = ">=1.0.1" [package.extras] -dev = ["pytest", "cogapp", "pre-commit", "wheel"] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] [[package]] @@ -89,6 +89,7 @@ python-versions = "*" [package.dependencies] six = ">=1.6.1,<2.0" +wheel = ">=0.23.0,<1.0" [[package]] name = "attrs" @@ -99,12 +100,12 @@ optional = false python-versions = ">=3.6" [package.extras] -cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs"] -docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["attrs", "zope.interface"] -tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] -tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", 
"zope.interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests_no_zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "backcall" @@ -144,14 +145,14 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.26.81" +version = "1.26.89" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.29.81,<1.30.0" +botocore = ">=1.29.89,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -160,7 +161,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.81" +version = "1.29.89" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -203,11 +204,11 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7.0" [[package]] name = "click" @@ -265,12 +266,12 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] +docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "debugpy" @@ -305,7 +306,7 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] +cli = ["Jinja2[cli] (>=3.0.3,<4.0.0)", "click[cli] (>=8.0.3,<9.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] [[package]] name = "executing" @@ -316,7 +317,7 @@ optional = false python-versions = "*" [package.extras] -tests = ["asttokens", "pytest", "littleutils", "rich"] +tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "fastjsonschema" @@ -327,7 +328,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "faunadb" @@ -345,7 +346,7 @@ requests = "*" [package.extras] lint = ["pylint"] -test = ["nose2", "nose2"] +test = ["nose2", "nose2[coverage_plugin]"] [[package]] name = "formy" @@ -481,9 +482,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -497,12 +498,12 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [[package]] name = "ipykernel" -version = "6.21.2" +version = "6.21.3" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -524,11 +525,11 @@ tornado = ">=6.1" traitlets = ">=5.4.0" [package.extras] -cov = ["coverage", "curio", "matplotlib", "pytest-cov", "trio"] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest (>=7.0)"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -553,9 +554,9 @@ stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.21)", "pandas", "trio"] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -563,10 +564,10 @@ notebook = ["ipywidgets", "notebook"] parallel = 
["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "trio"] +test_extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] [[package]] -name = "ipython-genutils" +name = "ipython_genutils" version = "0.2.0" description = "Vestigial utilities from IPython" category = "dev" @@ -622,12 +623,12 @@ python-versions = ">=3.6" parso = ">=0.8.0,<0.9.0" [package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] -name = "jinja2" +name = "Jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "main" @@ -715,12 +716,12 @@ tornado = ">=6.2" traitlets = ">=5.3" [package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinx (>=4)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-console" -version = "6.6.2" +version = "6.6.3" description = "Jupyter terminal console" category = "dev" optional = false @@ -775,11 +776,11 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "coverage", "pre-commit", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "pytest (>=7.0)", "rich"] +test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] [[package]] name = "jupyter-server" -version = "2.3.0" +version = "2.4.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false @@ -806,8 +807,8 @@ traitlets = ">=5.6.0" websocket-client = "*" [package.extras] -docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["ipykernel", "pre-commit", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "pytest (>=7.0)", "requests"] +docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-terminals" @@ -823,7 +824,7 @@ terminado = ">=0.8.3" [package.extras] docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout", "pytest (>=7.0)"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] [[package]] name = "jupyterlab-pygments" @@ -842,7 +843,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "markupsafe" +name = "MarkupSafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" @@ -870,7 +871,7 @@ python-versions = "*" [[package]] name = "nbclassic" -version = "0.5.2" +version = "0.5.3" description = "Jupyter Notebook as a Jupyter Server extension." 
category = "dev" optional = false @@ -896,9 +897,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "pytest-playwright", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "requests-unixsocket"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] [[package]] name = "nbclient" @@ -916,8 +917,8 @@ traitlets = ">=5.3" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient", "sphinx-book-theme", "sphinx (>=1.7)"] -test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "pytest (>=7.0)", "testpath", "xmltodict"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" @@ -946,9 +947,9 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["nbconvert"] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["nbconvert"] +qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] @@ -982,7 +983,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.5.2" +version = "6.5.3" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -1007,9 +1008,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "requests-unixsocket"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] [[package]] name = "notebook-shim" @@ -1037,9 +1038,9 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.0.0,<5.0.0" [package.extras] +isodate = ["isodate"] rfc3339-validator = ["rfc3339-validator"] strict-rfc3339 = ["strict-rfc3339"] -isodate = ["isodate"] [[package]] name = "openapi-spec-validator" @@ -1053,6 +1054,7 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.2.0,<5.0.0" openapi-schema-validator = ">=0.2.0,<0.3.0" PyYAML = ">=5.1" +setuptools = "*" [package.extras] requests = ["requests"] @@ -1122,7 +1124,15 @@ optional = false python-versions = "*" [[package]] -name = "pkgutil-resolve-name" +name = "pip" +version = "23.0.1" +description = "The PyPA recommended tool for installing Python packages." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pkgutil_resolve_name" version = "1.3.10" description = "Resolve a name to an object." 
category = "main" @@ -1131,15 +1141,15 @@ python-versions = ">=3.6" [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.1.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.1)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "ply" @@ -1180,7 +1190,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "ptyprocess" @@ -1210,7 +1220,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -name = "pygments" +name = "Pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" @@ -1221,7 +1231,7 @@ python-versions = ">=3.6" plugins = ["importlib-metadata"] [[package]] -name = "pyjwt" +name = "PyJWT" version = "2.6.0" description = "JSON Web Token implementation in Python" category = "main" @@ -1230,9 +1240,9 @@ python-versions = ">=3.7" [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pyrsistent" @@ -1286,7 +1296,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "pyyaml" +name = "PyYAML" version = "6.0" description = "YAML parser and emitter for Python" category = "main" @@ -1306,7 +1316,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.4.0" +version = "5.4.1" description = "Jupyter Qt console" category = "dev" optional = false @@ -1317,6 +1327,7 @@ ipykernel = ">=4.1" ipython-genutils = "*" jupyter-client = ">=4.1" jupyter-core = "*" +packaging = "*" pygments = "*" pyzmq = ">=17.1" qtpy = ">=2.0.1" @@ -1327,7 +1338,7 @@ doc = ["Sphinx (>=1.3)"] test = ["flaky", "pytest", "pytest-qt"] [[package]] -name = "qtpy" +name = "QtPy" version = "2.3.0" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" @@ -1419,7 +1430,7 @@ PyYAML = ">=3.12" valley = ">=1.5.2" [[package]] -name = "send2trash" +name = "Send2Trash" version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." 
category = "dev" @@ -1427,10 +1438,23 @@ optional = false python-versions = "*" [package.extras] -nativelib = ["pyobjc-framework-cocoa", "pywin32"] -objc = ["pyobjc-framework-cocoa"] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] win32 = ["pywin32"] +[[package]] +name = "setuptools" +version = "67.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -1469,7 +1493,7 @@ executing = ">=1.2.0" pure-eval = "*" [package.extras] -tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "stripe" @@ -1509,7 +1533,7 @@ tornado = ">=6.1.0" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest-timeout", "pytest (>=7.0)"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] [[package]] name = "tinycss2" @@ -1523,8 +1547,8 @@ python-versions = ">=3.7" webencodings = ">=0.4" [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -test = ["pytest", "isort", "flake8"] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] [[package]] name = "tornado" @@ -1555,19 +1579,19 @@ optional = false python-versions = ">=3.6" [package.extras] -dev = ["mypy", "flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "pep8-naming"] +dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1619,7 +1643,7 @@ optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] -name = "werkzeug" +name = "Werkzeug" version = "2.1.2" description = "The comprehensive WSGI web application library." category = "main" @@ -1629,6 +1653,17 @@ python-versions = ">=3.7" [package.extras] watchdog = ["watchdog"] +[[package]] +name = "wheel" +version = "0.38.4" +description = "A built-package format for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=3.0.0)"] + [[package]] name = "widgetsnbextension" version = "4.0.5" @@ -1646,140 +1681,985 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "1eed3385a9e7bfaf61e1c764485f5ebfff46418df4af9b6ee8dd0bb12b429c37" +content-hash = "2da6450ab510552fae213960c22acc6456126a4642acf9fd9bc77062959c14f7" [metadata.files] -anyio = [] -appnope = [] -argon2-cffi = [] -argon2-cffi-bindings = [] -arrow = [] -asttokens = [] -astunparse = [] -attrs = [] -backcall = [] -beautifulsoup4 = [] -bleach = [] -boto3 = [] -botocore = [] -cachetools = [] -certifi = [] -cffi = [] -charset-normalizer = [] -click = [] -colorama = [] -comm = [] -coverage = [] -cryptography = [] -debugpy = [] -decorator = [] -defusedxml = [] -envs = [] -executing = [] -fastjsonschema = [] -faunadb = [] -formy = [] -fqdn = [] -future = [] -graphql-py = [] -h11 = [] -h2 = [] -hpack = [] -httpcore = [] -httpx = [] -hyperframe = [] -idna = [] -importlib-metadata = [] -importlib-resources = [] -ipykernel = [] -ipython = [] -ipython-genutils = [] -ipywidgets = [] -iso8601 = [] -isoduration = [] -jedi = [] -jinja2 = [] -jmespath = [] -jsonpointer = [] -jsonschema = [] -jupyter = [] -jupyter-client = [] -jupyter-console = [] -jupyter-core = [] -jupyter-events = [] -jupyter-server = [] -jupyter-server-terminals = [] -jupyterlab-pygments = [] -jupyterlab-widgets = [] -markupsafe = [] -matplotlib-inline = [] -mistune = [] -nbclassic = [] -nbclient = [] -nbconvert = [] -nbformat = [] -nest-asyncio = [] -notebook = [] -notebook-shim = [] -openapi-schema-validator = [] -openapi-spec-validator = [] -packaging = [] -pandocfilters = [] -parso = [] 
-pdoc = [] -pexpect = [] -pickleshare = [] -pkgutil-resolve-name = [] -platformdirs = [] -ply = [] -prometheus-client = [] -prompt-toolkit = [] -psutil = [] -ptyprocess = [] -pure-eval = [] -pycparser = [] -pygments = [] -pyjwt = [] -pyrsistent = [] -python-dateutil = [] -python-json-logger = [] -pytz = [] -pywin32 = [] -pywinpty = [] -pyyaml = [] -pyzmq = [] -qtconsole = [] -qtpy = [] -requests = [] -rfc3339-validator = [] -rfc3986 = [] -rfc3986-validator = [] -s3transfer = [] -sammy = [] -send2trash = [] -six = [] -sniffio = [] -soupsieve = [] -stack-data = [] -stripe = [] -swaggyp = [] -terminado = [] -tinycss2 = [] -tornado = [] -traitlets = [] -uri-template = [] -urllib3 = [] -valley = [] -wcwidth = [] -webcolors = [] -webencodings = [] -websocket-client = [] -werkzeug = [] -widgetsnbextension = [] -zipp = [] +anyio = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] +argon2-cffi = [ + {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] +argon2-cffi-bindings = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] +arrow = [ + {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, + {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, +] +asttokens = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] +astunparse = [ + {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, +] +attrs = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, + {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, +] +bleach = [ + {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, + {file = "bleach-4.1.0.tar.gz", hash = 
"sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, +] +boto3 = [ + {file = "boto3-1.26.89-py3-none-any.whl", hash = "sha256:09929b24aaec4951e435d53d31f800e2ca52244af049dc11e5385ce062e106e9"}, + {file = "boto3-1.26.89.tar.gz", hash = "sha256:e819812f16fab46fadf9b2853a46aaa126e108e7f038502dde555ebbbfc80133"}, +] +botocore = [ + {file = "botocore-1.29.89-py3-none-any.whl", hash = "sha256:b757e59feca82ac62934f658918133116b4535cf66f1d72ff4935fa24e522527"}, + {file = "botocore-1.29.89.tar.gz", hash = "sha256:ac8da651f73a9d5759cf5d80beba68deda407e56aaaeb10d249fd557459f3b56"}, +] +cachetools = [ + {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, + {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, +] +certifi = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = 
"cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] +charset-normalizer = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +comm = [ + {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, + {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, +] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] +cryptography = [ + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = 
"cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, +] +debugpy = [ + {file = "debugpy-1.6.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0ea1011e94416e90fb3598cc3ef5e08b0a4dd6ce6b9b33ccd436c1dffc8cd664"}, + {file = "debugpy-1.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dff595686178b0e75580c24d316aa45a8f4d56e2418063865c114eef651a982e"}, + {file = "debugpy-1.6.6-cp310-cp310-win32.whl", hash = "sha256:87755e173fcf2ec45f584bb9d61aa7686bb665d861b81faa366d59808bbd3494"}, + {file = "debugpy-1.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:72687b62a54d9d9e3fb85e7a37ea67f0e803aaa31be700e61d2f3742a5683917"}, + {file = "debugpy-1.6.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:78739f77c58048ec006e2b3eb2e0cd5a06d5f48c915e2fc7911a337354508110"}, + {file = "debugpy-1.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:23c29e40e39ad7d869d408ded414f6d46d82f8a93b5857ac3ac1e915893139ca"}, + {file = "debugpy-1.6.6-cp37-cp37m-win32.whl", hash = "sha256:7aa7e103610e5867d19a7d069e02e72eb2b3045b124d051cfd1538f1d8832d1b"}, + {file = "debugpy-1.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:f6383c29e796203a0bba74a250615ad262c4279d398e89d895a69d3069498305"}, + {file = "debugpy-1.6.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:23363e6d2a04d726bbc1400bd4e9898d54419b36b2cdf7020e3e215e1dcd0f8e"}, + {file = "debugpy-1.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b5d1b13d7c7bf5d7cf700e33c0b8ddb7baf030fcf502f76fc061ddd9405d16c"}, + {file = "debugpy-1.6.6-cp38-cp38-win32.whl", hash = "sha256:70ab53918fd907a3ade01909b3ed783287ede362c80c75f41e79596d5ccacd32"}, + {file = "debugpy-1.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:c05349890804d846eca32ce0623ab66c06f8800db881af7a876dc073ac1c2225"}, + {file = "debugpy-1.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771739902b1ae22a120dbbb6bd91b2cae6696c0e318b5007c5348519a4211c6"}, + {file = "debugpy-1.6.6-cp39-cp39-win32.whl", hash = "sha256:549ae0cb2d34fc09d1675f9b01942499751d174381b6082279cf19cdb3c47cbe"}, + {file = "debugpy-1.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:de4a045fbf388e120bb6ec66501458d3134f4729faed26ff95de52a754abddb1"}, + {file = "debugpy-1.6.6-py2.py3-none-any.whl", hash = "sha256:be596b44448aac14eb3614248c91586e2bc1728e020e82ef3197189aae556115"}, + {file = "debugpy-1.6.6.zip", hash = "sha256:b9c2130e1c632540fbf9c2c88341493797ddf58016e7cba02e311de9b0a96b67"}, +] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] +envs = [ + {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, + {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, +] +executing = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] +fastjsonschema = [ + {file = "fastjsonschema-2.16.3-py3-none-any.whl", hash = "sha256:04fbecc94300436f628517b05741b7ea009506ce8f946d40996567c669318490"}, + {file = "fastjsonschema-2.16.3.tar.gz", hash = "sha256:4a30d6315a68c253cfa8f963b9697246315aa3db89f98b97235e345dedfb0b8e"}, +] +faunadb = [ + {file = "faunadb-4.5.0-py2.py3-none-any.whl", hash = "sha256:5845911a3c16bc405145e16a247b1bcf67b4113822962cbfc40e1d1c6b5ac745"}, +] +formy = [ + {file = "formy-1.3.1-py3-none-any.whl", hash = "sha256:07c2a7ee351039694fe5b958ad4dfec34baeb0ffbddbf4af231609a75994e6f6"}, + {file = "formy-1.3.1.tar.gz", hash = "sha256:4ce7f79185c88f2fd896984a17e3d5cd23360db5408c7e726f64609371c0035d"}, +] +fqdn = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = 
"sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] +future = [ + {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, +] +graphql-py = [ + {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"}, +] +h11 = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] +h2 = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] +hpack = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] +httpcore = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] +httpx = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] +hyperframe = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] +idna = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] +importlib-metadata = [ + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, +] +importlib-resources = [ + {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, + {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, +] +ipykernel = [ + {file = "ipykernel-6.21.3-py3-none-any.whl", hash = "sha256:24ebd9715e317c185e37156ab3a87382410185230dde7aeffce389d6c7d4428a"}, + {file = "ipykernel-6.21.3.tar.gz", hash = "sha256:c8ff581905d70e7299bc1473a2f7c113bec1744fb3746d58e5b4b93bd8ee7001"}, +] +ipython = [ + {file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"}, + {file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"}, +] +ipython_genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] 
+ipywidgets = [ + {file = "ipywidgets-8.0.4-py3-none-any.whl", hash = "sha256:ebb195e743b16c3947fe8827190fb87b4d00979c0fbf685afe4d2c4927059fa1"}, + {file = "ipywidgets-8.0.4.tar.gz", hash = "sha256:c0005a77a47d77889cafed892b58e33b4a2a96712154404c6548ec22272811ea"}, +] +iso8601 = [ + {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, + {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, +] +isoduration = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] +jedi = [ + {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, + {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, +] +Jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] +jsonpointer = [ + {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, + {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, +] +jsonschema = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] +jupyter = [ + {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, + {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, + {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, +] +jupyter-client = [ + {file = "jupyter_client-8.0.3-py3-none-any.whl", hash = "sha256:be48ac6bd659cbbddb7a674cf06b3b8afbf53f228253cf58bde604c03bd487b0"}, + {file = "jupyter_client-8.0.3.tar.gz", hash = "sha256:ed65498bea6d876ef9d8da3e0db3dd33c5d129f5b2645f56ae03993782966bd0"}, +] +jupyter-console = [ + {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, + {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, +] +jupyter-core = [ + {file = "jupyter_core-5.2.0-py3-none-any.whl", hash = "sha256:4bdc2928c37f6917130c667d8b8708f20aee539d8283c6be72aabd2a4b4c83b0"}, + {file = "jupyter_core-5.2.0.tar.gz", hash = "sha256:1407cdb4c79ee467696c04b76633fc1884015fa109323365a6372c8e890cc83f"}, +] +jupyter-events = [ + {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, + {file = "jupyter_events-0.6.3.tar.gz", hash = 
"sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, +] +jupyter-server = [ + {file = "jupyter_server-2.4.0-py3-none-any.whl", hash = "sha256:cc22792281bfb0131a728414f28ae74883b44ad6d009971aa975cae9bcc650de"}, + {file = "jupyter_server-2.4.0.tar.gz", hash = "sha256:f31f0ba2c3c44f07143bfa03fb07dd0253f857eb63f0c26f2fea955f04a49765"}, +] +jupyter-server-terminals = [ + {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, + {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, +] +jupyterlab-pygments = [ + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, +] +jupyterlab-widgets = [ + {file = "jupyterlab_widgets-3.0.5-py3-none-any.whl", hash = "sha256:a04a42e50231b355b7087e16a818f541e53589f7647144ea0344c4bf16f300e5"}, + {file = "jupyterlab_widgets-3.0.5.tar.gz", hash = "sha256:eeaecdeaf6c03afc960ddae201ced88d5979b4ca9c3891bcb8f6631af705f5ef"}, +] +MarkupSafe = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = 
"MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] +mistune = [ + {file = "mistune-2.0.5-py2.py3-none-any.whl", hash = "sha256:bad7f5d431886fcbaf5f758118ecff70d31f75231b34024a1341120340a65ce8"}, + {file = "mistune-2.0.5.tar.gz", hash = "sha256:0246113cb2492db875c6be56974a7c893333bf26cd92891c85f63151cee09d34"}, +] +nbclassic = [ + {file = "nbclassic-0.5.3-py3-none-any.whl", hash = "sha256:e849277872d9ffd8fe4b39a8038d01ba82d6a1def9ce11b1b3c26c9546ed5131"}, + {file = "nbclassic-0.5.3.tar.gz", hash = "sha256:889772a7ba524eb781d2901f396540bcad41151e1f7e043f12ebc14a6540d342"}, +] +nbclient = [ + {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, + {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, +] +nbconvert = [ + {file = "nbconvert-7.2.9-py3-none-any.whl", hash = "sha256:495638c5e06005f4a5ce828d8a81d28e34f95c20f4384d5d7a22254b443836e7"}, + {file = "nbconvert-7.2.9.tar.gz", hash = "sha256:a42c3ac137c64f70cbe4d763111bf358641ea53b37a01a5c202ed86374af5234"}, +] +nbformat = [ + {file = "nbformat-5.7.3-py3-none-any.whl", hash = "sha256:22a98a6516ca216002b0a34591af5bcb8072ca6c63910baffc901cfa07fefbf0"}, + {file = "nbformat-5.7.3.tar.gz", hash = 
"sha256:4b021fca24d3a747bf4e626694033d792d594705829e5e35b14ee3369f9f6477"}, +] +nest-asyncio = [ + {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, + {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, +] +notebook = [ + {file = "notebook-6.5.3-py3-none-any.whl", hash = "sha256:50a334ad9d60b30cb759405168ef6fc3d60350ab5439fb1631544bb09dcb2cce"}, + {file = "notebook-6.5.3.tar.gz", hash = "sha256:b12bee3292211d85dd7e588a790ddce30cb3e8fbcfa1e803522a207f60819e05"}, +] +notebook-shim = [ + {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, + {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, +] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] +packaging = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] +pandocfilters = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] +pdoc = [ + {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +pip = [ + {file = "pip-23.0.1-py3-none-any.whl", hash = "sha256:236bcb61156d76c4b8a05821b988c7b8c35bf0da28a4b614e8d6ab5212c25c6f"}, + {file = "pip-23.0.1.tar.gz", hash = "sha256:cd015ea1bfb0fcef59d8a286c1f8bebcb983f6317719d415dc5351efb7cd7024"}, +] +pkgutil_resolve_name = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] 
+platformdirs = [ + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, +] +ply = [ + {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, + {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, +] +prometheus-client = [ + {file = "prometheus_client-0.16.0-py3-none-any.whl", hash = "sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab"}, + {file = "prometheus_client-0.16.0.tar.gz", hash = "sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, +] +psutil = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = 
"sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +Pygments = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, +] +PyJWT = [ + {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, + {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, +] +pyrsistent = [ + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-json-logger = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] +pytz = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] +pywin32 = [ + {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, + {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, + {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, + {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, + {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, + {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, + {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, + 
{file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, + {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, + {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, + {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, + {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, + {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, + {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, +] +pywinpty = [ + {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, + {file = "pywinpty-2.0.10-cp311-none-win_amd64.whl", hash = "sha256:7ffbd66310b83e42028fc9df7746118978d94fba8c1ebf15a7c1275fdd80b28a"}, + {file = "pywinpty-2.0.10-cp37-none-win_amd64.whl", hash = "sha256:38cb924f2778b5751ef91a75febd114776b3af0ae411bc667be45dd84fc881d3"}, + {file = "pywinpty-2.0.10-cp38-none-win_amd64.whl", hash = "sha256:902d79444b29ad1833b8d5c3c9aabdfd428f4f068504430df18074007c8c0de8"}, + {file = "pywinpty-2.0.10-cp39-none-win_amd64.whl", hash = "sha256:3c46aef80dd50979aff93de199e4a00a8ee033ba7a03cadf0a91fed45f0c39d7"}, + {file = "pywinpty-2.0.10.tar.gz", hash = "sha256:cdbb5694cf8c7242c2ecfaca35c545d31fa5d5814c3d67a4e628f803f680ebea"}, +] +PyYAML = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = 
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +pyzmq = [ + {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2d05d904f03ddf1e0d83d97341354dfe52244a619b5a1440a5f47a5b3451e84e"}, + {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:0a154ef810d44f9d28868be04641f837374a64e7449df98d9208e76c260c7ef1"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487305c2a011fdcf3db1f24e8814bb76d23bc4d2f46e145bc80316a59a9aa07d"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e7b87638ee30ab13230e37ce5331b3e730b1e0dda30120b9eeec3540ed292c8"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75243e422e85a62f0ab7953dc315452a56b2c6a7e7d1a3c3109ac3cc57ed6b47"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:31e523d067ce44a04e876bed3ff9ea1ff8d1b6636d16e5fcace9d22f8c564369"}, + {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8539216173135e9e89f6b1cc392e74e6b935b91e8c76106cf50e7a02ab02efe5"}, + {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2754fa68da08a854f4816e05160137fa938a2347276471103d31e04bcee5365c"}, + {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1bc30f0c18444d51e9b0d0dd39e3a4e7c53ee74190bebef238cd58de577ea9"}, + {file = "pyzmq-25.0.0-cp310-cp310-win32.whl", hash = "sha256:01d53958c787cfea34091fcb8ef36003dbb7913b8e9f8f62a0715234ebc98b70"}, + {file = "pyzmq-25.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:58fc3ad5e1cfd2e6d24741fbb1e216b388115d31b0ca6670f894187f280b6ba6"}, + {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4bba04ea779a3d7ef25a821bb63fd0939142c88e7813e5bd9c6265a20c523a2"}, + {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af1fbfb7ad6ac0009ccee33c90a1d303431c7fb594335eb97760988727a37577"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85456f0d8f3268eecd63dede3b99d5bd8d3b306310c37d4c15141111d22baeaf"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0645b5a2d2a06fd8eb738018490c514907f7488bf9359c6ee9d92f62e844b76f"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f72ea279b2941a5203e935a4588b9ba8a48aeb9a926d9dfa1986278bd362cb8"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:4e295f7928a31ae0f657e848c5045ba6d693fe8921205f408ca3804b1b236968"}, + {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ac97e7d647d5519bcef48dd8d3d331f72975afa5c4496c95f6e854686f45e2d9"}, + {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:656281d496aaf9ca4fd4cea84e6d893e3361057c4707bd38618f7e811759103c"}, + {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f6116991568aac48b94d6d8aaed6157d407942ea385335a6ed313692777fb9d"}, + {file = "pyzmq-25.0.0-cp311-cp311-win32.whl", hash = "sha256:0282bba9aee6e0346aa27d6c69b5f7df72b5a964c91958fc9e0c62dcae5fdcdc"}, + {file = "pyzmq-25.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:526f884a27e8bba62fe1f4e07c62be2cfe492b6d432a8fdc4210397f8cf15331"}, + {file = "pyzmq-25.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ccb3e1a863222afdbda42b7ca8ac8569959593d7abd44f5a709177d6fa27d266"}, + {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4046d03100aca266e70d54a35694cb35d6654cfbef633e848b3c4a8d64b9d187"}, + {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3100dddcada66ec5940ed6391ebf9d003cc3ede3d320748b2737553019f58230"}, + {file = 
"pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7877264aa851c19404b1bb9dbe6eed21ea0c13698be1eda3784aab3036d1c861"}, + {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5049e75cc99db65754a3da5f079230fb8889230cf09462ec972d884d1704a3ed"}, + {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:81f99fb1224d36eb91557afec8cdc2264e856f3464500b55749020ce4c848ef2"}, + {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a1cd4a95f176cdc0ee0a82d49d5830f13ae6015d89decbf834c273bc33eeb3d3"}, + {file = "pyzmq-25.0.0-cp36-cp36m-win32.whl", hash = "sha256:926236ca003aec70574754f39703528947211a406f5c6c8b3e50eca04a9e87fc"}, + {file = "pyzmq-25.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:94f0a7289d0f5c80807c37ebb404205e7deb737e8763eb176f4770839ee2a287"}, + {file = "pyzmq-25.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f3f96d452e9580cb961ece2e5a788e64abaecb1232a80e61deffb28e105ff84a"}, + {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:930e6ad4f2eaac31a3d0c2130619d25db754b267487ebc186c6ad18af2a74018"}, + {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1081d7030a1229c8ff90120346fb7599b54f552e98fcea5170544e7c6725aab"}, + {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:531866c491aee5a1e967c286cfa470dffac1e2a203b1afda52d62b58782651e9"}, + {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fc7c1421c5b1c916acf3128bf3cc7ea7f5018b58c69a6866d70c14190e600ce9"}, + {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9a2d5e419bd39a1edb6cdd326d831f0120ddb9b1ff397e7d73541bf393294973"}, + {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:183e18742be3621acf8908903f689ec520aee3f08449bfd29f583010ca33022b"}, + {file = "pyzmq-25.0.0-cp37-cp37m-win32.whl", hash = "sha256:02f5cb60a7da1edd5591a15efa654ffe2303297a41e1b40c3c8942f8f11fc17c"}, + {file = "pyzmq-25.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cac602e02341eaaf4edfd3e29bd3fdef672e61d4e6dfe5c1d065172aee00acee"}, + {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:e14df47c1265356715d3d66e90282a645ebc077b70b3806cf47efcb7d1d630cb"}, + {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:293a7c2128690f496057f1f1eb6074f8746058d13588389981089ec45d8fdc77"}, + {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:731b208bc9412deeb553c9519dca47136b5a01ca66667cafd8733211941b17e4"}, + {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b055a1cddf8035966ad13aa51edae5dc8f1bba0b5d5e06f7a843d8b83dc9b66b"}, + {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17e1cb97d573ea84d7cd97188b42ca6f611ab3ee600f6a75041294ede58e3d20"}, + {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:60ecbfe7669d3808ffa8a7dd1487d6eb8a4015b07235e3b723d4b2a2d4de7203"}, + {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c25c95416133942280faaf068d0fddfd642b927fb28aaf4ab201a738e597c1e"}, + {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be05504af0619d1cffa500af1e0ede69fb683f301003851f5993b5247cc2c576"}, + {file = "pyzmq-25.0.0-cp38-cp38-win32.whl", hash = "sha256:6bf3842af37af43fa953e96074ebbb5315f6a297198f805d019d788a1021dbc8"}, + {file = 
"pyzmq-25.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b90bb8dfbbd138558f1f284fecfe328f7653616ff9a972433a00711d9475d1a9"}, + {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:62b9e80890c0d2408eb42d5d7e1fc62a5ce71be3288684788f74cf3e59ffd6e2"}, + {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484c2c4ee02c1edc07039f42130bd16e804b1fe81c4f428e0042e03967f40c20"}, + {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9ca6db34b26c4d3e9b0728841ec9aa39484eee272caa97972ec8c8e231b20c7e"}, + {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:610d2d112acd4e5501fac31010064a6c6efd716ceb968e443cae0059eb7b86de"}, + {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3594c0ff604e685d7e907860b61d0e10e46c74a9ffca168f6e9e50ea934ee440"}, + {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c21a5f4e54a807df5afdef52b6d24ec1580153a6bcf0607f70a6e1d9fa74c5c3"}, + {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4725412e27612f0d7d7c2f794d89807ad0227c2fc01dd6146b39ada49c748ef9"}, + {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d3d604fe0a67afd1aff906e54da557a5203368a99dcc50a70eef374f1d2abef"}, + {file = "pyzmq-25.0.0-cp39-cp39-win32.whl", hash = "sha256:3670e8c5644768f214a3b598fe46378a4a6f096d5fb82a67dfd3440028460565"}, + {file = "pyzmq-25.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e99629a976809fe102ef73e856cf4b2660acd82a412a51e80ba2215e523dfd0a"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:66509c48f7446b640eeae24b60c9c1461799a27b1b0754e438582e36b5af3315"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c464cc508177c09a5a6122b67f978f20e2954a21362bf095a0da4647e3e908"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:28bcb2e66224a7ac2843eb632e4109d6b161479e7a2baf24e37210461485b4f1"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e7ef9ac807db50b4eb6f534c5dcc22f998f5dae920cc28873d2c1d080a4fc9"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5050f5c50b58a6e38ccaf9263a356f74ef1040f5ca4030225d1cb1a858c5b7b6"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a73af6504e0d2805e926abf136ebf536735a13c22f709be7113c2ec65b4bec3"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0e8d00228db627ddd1b418c7afd81820b38575f237128c9650365f2dd6ac3443"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5605621f2181f20b71f13f698944deb26a0a71af4aaf435b34dd90146092d530"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6136bfb0e5a9cf8c60c6ac763eb21f82940a77e6758ea53516c8c7074f4ff948"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0a90b2480a26aef7c13cff18703ba8d68e181facb40f78873df79e6d42c1facc"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00c94fd4c9dd3c95aace0c629a7fa713627a5c80c1819326b642adf6c4b8e2a2"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20638121b0bdc80777ce0ec8c1f14f1ffec0697a1f88f0b564fa4a23078791c4"}, + {file = 
"pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f75b4b8574f3a8a0d6b4b52606fc75b82cb4391471be48ab0b8677c82f9ed4"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cbb885f347eba7ab7681c450dee5b14aed9f153eec224ec0c3f299273d9241f"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c48f257da280b3be6c94e05bd575eddb1373419dbb1a72c3ce64e88f29d1cd6d"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:866eabf7c1315ef2e93e34230db7cbf672e0d7c626b37c11f7e870c8612c3dcc"}, + {file = "pyzmq-25.0.0.tar.gz", hash = "sha256:f330a1a2c7f89fd4b0aa4dcb7bf50243bf1c8da9a2f1efc31daf57a2046b31f2"}, +] +qtconsole = [ + {file = "qtconsole-5.4.1-py3-none-any.whl", hash = "sha256:bae8c7e10170cdcdcaf7e6d53ad7d6a7412249b9b8310a0eaa6b6f3b260f32db"}, + {file = "qtconsole-5.4.1.tar.gz", hash = "sha256:f67a03f40f722e13261791280f73068dbaf9dafcc335cbba644ccc8f892640e5"}, +] +QtPy = [ + {file = "QtPy-2.3.0-py3-none-any.whl", hash = "sha256:8d6d544fc20facd27360ea189592e6135c614785f0dec0b4f083289de6beb408"}, + {file = "QtPy-2.3.0.tar.gz", hash = "sha256:0603c9c83ccc035a4717a12908bf6bc6cb22509827ea2ec0e94c2da7c9ed57c5"}, +] +requests = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] +rfc3339-validator = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +rfc3986-validator = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] +s3transfer = [ + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, +] +sammy = [ + {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, + {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, +] +Send2Trash = [ + {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, + {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, +] +setuptools = [ + {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] +soupsieve = [ + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, +] +stack-data = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] +stripe = [ + {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, + {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, +] +swaggyp = [ + {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, + {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, +] +terminado = [ + {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, + {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, +] +tinycss2 = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] +tornado = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = 
"sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, +] +traitlets = [ + {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, + {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, +] +uri-template = [ + {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, + {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, +] +urllib3 = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] +valley = [ + {file = "valley-1.5.8-py3-none-any.whl", hash = "sha256:c30c0bdb30e5be561dd4332281fc53315c4c34f174d268d8cc7496a6f47ee314"}, + {file = "valley-1.5.8.tar.gz", hash = "sha256:88342fa4af854b8e9e426776995c9c2c690b432ea35c0c9529fa0abb62e553e3"}, +] +wcwidth = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] +webcolors = [ + {file = "webcolors-1.12-py3-none-any.whl", hash = "sha256:d98743d81d498a2d3eaf165196e65481f0d2ea85281463d856b1e51b09f62dce"}, + {file = "webcolors-1.12.tar.gz", hash = "sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9"}, +] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] +websocket-client = [ + {file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"}, + {file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"}, +] +Werkzeug = [ + {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, + {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, +] +wheel = [ + {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, + {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, +] +widgetsnbextension = [ + {file = "widgetsnbextension-4.0.5-py3-none-any.whl", hash = "sha256:eaaaf434fb9b08bd197b2a14ffe45ddb5ac3897593d43c69287091e5f3147bf7"}, + {file = "widgetsnbextension-4.0.5.tar.gz", hash = "sha256:003f716d930d385be3fd9de42dd9bf008e30053f73bddde235d14fbeaeff19af"}, +] +zipp = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] diff --git a/pyproject.toml b/pyproject.toml index 2e3bdbc..5151a21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,6 @@ 
envs = "^1.3" requests = "^2.23.0" pytz = "^2021.1" decorator = "^5.0.9" -pyjwt = "^2.1.0" pip = "^23.0.1" cryptography = "^3.4.7" cachetools = "^4.2.2" @@ -28,6 +27,7 @@ swaggyp = "^0.3.0" formy = "1.3.1" Jinja2 = "^3.1.2" Werkzeug = "2.1.2" +PyJWT = "^2.6.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 89355ad459edc5e5f42af06ce6fbdac5bd7a79eb Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 14 Mar 2023 15:05:02 +0800 Subject: [PATCH 119/214] Added essential docstrings and little refactors for performance and documentation --- pfunk/collection.py | 14 +- pfunk/contrib/auth/collections.py | 152 +++++++++-- pfunk/contrib/auth/resources.py | 258 +++++++++++------- pfunk/tests/test_aws.py | 3 +- .../test_web_custom_user_group_group_perms.py | 16 +- pfunk/tests/test_web_custom_user_group_m2m.py | 23 +- .../test_web_custom_user_group_users_perms.py | 16 +- 7 files changed, 326 insertions(+), 156 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 500b573..ef3a9f8 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -140,10 +140,9 @@ def get_user_field(self) -> str: fields = self._base_properties.items() user_class = self.user_collection or env('USER_COLLECTION', 'User') user_field = None - for k, v in fields: - if user_class in v.get_graphql_type(): - user_field = k - break + user_fields = [k for k, v in fields if user_class in v.get_graphql_type()] + if user_fields: + user_field = user_fields[0] return user_field def get_group_field(self) -> str: @@ -156,10 +155,9 @@ def get_group_field(self) -> str: fields = self._base_properties.items() group_class = self.group_collection or env('GROUP_COLLECTION', 'Group') group_field = None - for k, v in fields: - if group_class in v.get_graphql_type(): - group_field = k - break + group_fields = [k for k, v in fields if group_class in v.get_graphql_type()] + if group_fields: + group_field = group_fields[0] return group_field def get_collection_name(self) -> str: diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index fec6f3a..5e39ed8 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -28,10 +28,16 @@ class BaseGroup(Collection): slug = SlugField(unique=True, required=False) def __unicode__(self): + """Return the name of the group + + Returns: + str: Name of the group + """ return self.name # pragma: no cover class UserGroupByUserAndGroupIndex(Index): + """Lookup index for UserGroup M2M collection""" name = 'usergroups_by_userID_and_groupID' source = 'Usergroups' terms = [ @@ -49,6 +55,11 @@ class BaseUserGroup(Collection): permissions = ListField() def __unicode__(self): + """Return the userID, groupID, and permissions + + Returns: + str: userID, groupID, and permissions + """ return f"{self.userID}, {self.groupID}, {self.permissions}" @@ -58,7 +69,7 @@ class UserGroups(BaseUserGroup): The native fauna-way of holding many-to-many relationship is to only have the ID of the 2 object. Here in pfunk, we leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities + field, which is `permissions`, this field holds the capabilities of a user, allowing us to add easier permission handling. Instead of manually going to roles and adding individual collections which can be painful in long term. 
@@ -77,6 +88,15 @@ class UserGroups(BaseUserGroup): env('USER_COLLECTION_DIR', 'pfunk.contrib.auth.collections.User')) groupID = ReferenceField( env('GROUP_COLLECTION_DIR', 'pfunk.contrib.auth.collections.Group')) + permissions = ListField() + + def __unicode__(self): + """Return the userID, groupID, and permissions + + Returns: + str: userID, groupID, and permissions + """ + return f"{self.userID}, {self.groupID}, {self.permissions}" AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) @@ -120,14 +140,20 @@ class BaseUser(Collection): AccountStatus, required=True, default_value="INACTIVE") def __unicode__(self): + """Returns the username of the user""" return self.username # pragma: no cover @classmethod def login(cls, username, password, _token=None): """ Logs the user in to Fauna + Args: + username (str, required): Username of the user + password (str, required): Password of the user + _token (str, optional): Token of the user + Returns: - token: the token from fauna + token (str, required): the token from fauna """ c = cls() try: @@ -141,17 +167,35 @@ def login(cls, username, password, _token=None): @classmethod def logout(cls, _token=None): - """ Expires/invalidates the user's login token """ + """ Expires/invalidates the user's login token + + Args: + _token (str, optional): Token of the user + + Returns: + None + """ c = cls() return c.client(_token=_token).query( q.call("logout_user") ) def permissions(self, _token=None): + """Returns an empty array""" return [] @classmethod def api_login(cls, username, password, _token=None): + """ Logs the user in to Fauna and creates a JWT + + Args: + username (str, required): Username of the user + password (str, required): Password of the user + _token (str, optional): Token of the user + + Returns: + token (str, required): the token from fauna + """ token = cls.login(username=username, password=password, _token=_token) user = cls.get_current_user(_token=token) claims = user.to_dict().copy() @@ -166,7 +210,14 @@ def api_login(cls, username, password, _token=None): @classmethod def get_from_id(cls, _token=None): - """ Acquire user from the given Id """ + """ Acquire user from the given Id + + Args: + _token (str, optional): Token of the user + + Returns: + user (BaseUser, required): The user object + """ c = cls() ref = c.client(_token=_token).query( q.current_identity() @@ -176,10 +227,18 @@ def get_from_id(cls, _token=None): def attach_verification_key(self): """ Attaches the verification key to user to enable one-time activate + + Returns: + None """ self.verification_key = str(uuid.uuid4()) def attach_forgot_verification_key(self): + """ Attaches forgot password key to user + + Returns: + None + """ self.forgot_password_key = str(uuid.uuid4()) self.save() @@ -187,11 +246,18 @@ def attach_forgot_verification_key(self): def verify_email(cls, verification_key, verify_type='signup', password=None): """ Activate the user from the verification key - Args: - verification_key (str, required): - verification key in the email to compare the one - attached to the user - """ + Args: + verification_key (str, required): + verification key in the email to compare the one + attached to the user + verify_type (str, optional): + Type of verification being performed. Default: 'signup' + password (str, optional): + Password of the user. 
Required if verify_type is 'forgot' + + Returns: + None + """ if verify_type == 'signup': user = cls.get_by('unique_User_verification_key', [verification_key]) @@ -205,7 +271,18 @@ def verify_email(cls, verification_key, verify_type='signup', password=None): user.save(_credentials=password) def send_verification_email(self, from_email=None, verification_type='signup'): - """ Send the verification email with the hashed key """ + """ Send the verification email with the hashed key + + Args: + from_email (str, optional): + From email address of the verification email. + Default: env('DEFAULT_FROM_EMAIL') + verification_type (str, optional): + Type of verification being performed. Default: 'signup' + + Returns: + None + """ project_name = env('PROJECT_NAME', '') if verification_type == 'signup': txt_template = 'auth/verification_email.txt' @@ -240,8 +317,11 @@ def forgot_password(cls, email): """ Sends forgot password email to let user use that link to reset their password """ + # get the user object user = cls.get_by('unique_User_email', email) + # attach the forgot verification key user.attach_forgot_verification_key() + # send the verification email user.send_verification_email(verification_type='forgot') @classmethod @@ -254,13 +334,15 @@ def signup(cls, _token=None, **kwargs): **kwargs (dict, required): The user's needed information for creation """ + # create a data dict with the user's needed information data = kwargs data['account_status'] = 'INACTIVE' + # pop the group key if it exists try: data.pop('groups') except KeyError: pass - + # create the user cls.create(**data, _token=_token) @classmethod @@ -287,10 +369,13 @@ def update_password(cls, current_password, new_password, new_password_confirm, _ If current_password is wrong, will return `Wrong current password.` """ + # raise an exception if new password and new password confirm don't match if new_password != new_password_confirm: raise ValidationException( 'new_password: Password field and password confirm field do not match.') + # create a collection instance c = cls() + # update the password using the user's current password and the new password try: return c.client(_token=_token).query( q.call("update_password", { @@ -310,23 +395,31 @@ def get_current_user(cls, _token=None): id (str): Fauna ID of the user in `User` collection """ + # create a collection instance c = cls() + # get the current identity return cls.get(c.client(_token=_token).query(q.current_identity()).id()) - def __unicode__(self): - return self.username # pragma: no cover - class ExtendedUser(BaseUser): """ User that has permission capabilities. Extension of `BaseUser`. - Subclass and define these properties - Provides base methods for group-user permissions. If there are no - supplied `groups` property, will raise `NotImplementedErrror` + Subclass and define the properties needed. 
""" - # user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') + user_group_class = None + group_class = None @classmethod def get_permissions(cls, ref, _token=None): + """Returns the permissions of the user + + Args: + ref (str): The user ID + _token (str): Fauna auth token + + Returns: + str[]: Permissions of the user in list: + `['create', 'read', 'delete', 'write']` + """ return cls.get(ref, _token).permissions(_token=_token) def get_groups(self, _token=None): @@ -340,10 +433,12 @@ def get_groups(self, _token=None): index_name = f'{user_class}s_{group_class}s_by_{user_class}' if relation_name: index_name = f'{relation_name}_by_{user_class}' - - return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( + # query Fauna to get the data + data = self.client(_token=_token).query( q.paginate(q.match(index_name, self.ref)) - ).get('data')] + ).get('data') + # create a list of group instances from the data + return [self.group_class.get(i.id(), _token=_token) for i in data] def permissions(self, _token=None): """ Returns the permissions of the user @@ -359,18 +454,24 @@ def permissions(self, _token=None): perm_list (str[]): Permissions of the user in list: `['create', 'read', 'delete', 'write']` """ - index_name = 'usergroups_by_userID_and_groupID' perm_list = [] + # loop over the groups of the user for i in self.get_groups(_token=_token): + # query Fauna to get the UserGroup instance of the user ug = self.user_group_class.get_index(index_name, [ self.ref, i.ref], _token=_token) + # loop over the UserGroup instances for user_group in ug: p = [] + # check if there are any permissions in the instance if isinstance(user_group.permissions, list): + # loop over the permissions p = [ f'{user_group.groupID.slug}-{i}' for i in user_group.permissions] + # add the permissions to the list perm_list.extend(p) + # return a list of the user's permissions return perm_list def add_permissions(self, group, permissions: list, _token=None): @@ -397,21 +498,28 @@ def add_permissions(self, group, permissions: list, _token=None): perm_list = [] index_name = 'usergroups_by_userID_and_groupID' + # loop over the permissions and add to the list for i in permissions: perm_list.extend(i.permissions) + # raise an error if the user_group_class is not defined if not self.user_group_class: raise NotImplementedError + # try to get the UserGroup instance try: user_group = self.user_group_class.get_by( index_name, terms=[self.ref, group.ref]) + # create a new instance if not found except DocNotFound: user_group = self.user_group_class.create( userID=self.ref, groupID=group.ref, permissions=perm_list, _token=_token) + # update the permissions if they're not the same if user_group.permissions != perm_list: user_group.permissions = perm_list + # save the changes user_group.save() + # return the UserGroup instance return user_group diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 36c2064..52ba448 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -181,25 +181,34 @@ def get_lambda(self, resource_type): ) + class GenericAuthorizationRole(Role): + """This class provides generic authorization roles for collections""" - def get_relation_index_name(self): - """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ + def get_relation_index_name(self) -> str: + """ + Returns the index name of the created permission index of group and user -> 
'usergroups_by_userID_and_groupID' + """ return 'usergroups_by_userID_and_groupID' - def get_user_table(self): + def get_user_table(self) -> str: + """Returns the user table name""" return self.collection.user_collection or env('USER_COLLECTION', 'User') - def get_group_table(self): + def get_group_table(self) -> str: + """Returns the group table name""" return self.collection.group_collection or env('GROUP_COLLECTION', 'Group') - def get_name_suffix(self): + def get_name_suffix(self) -> str: + """Returns the name suffix for this role""" return f'{self.collection.get_user_field().lower()}_based_crud_role' - def get_name(self): + def get_name(self) -> str: + """Returns the name for this role""" return self.name or f"{self.collection.get_class_name()}_{self.get_name_suffix()}" - def get_privileges(self): + def get_privileges(self) -> list: + """Returns the list of privileges for this role""" priv_list = [ { "resource": q.collection(self.collection.get_collection_name()), @@ -241,16 +250,24 @@ def get_privileges(self): } for i in self.collection.collection_functions ]) + return priv_list class GenericUserBasedRole(GenericAuthorizationRole): - """ Generic set of permissions for entity to user relationship """ + """Class to provide a generic set of permissions based on the user-entity relationship. + + Args: + GenericAuthorizationRole (class): Inherited class + """ def get_relation_index_name(self): - """ Returns the user-group by user index name + """Returns the user-group by user index name + + Formatted as: {user_group_relation_name}_by_{user_class} - Formatted as: {user_group_relation_name}_by_{user_class} + Returns: + str: User-group by user index name """ # Acquires the `groups` field from the user collection user_field = self.collection.get_user_field() @@ -270,67 +287,86 @@ def get_relation_index_name(self): return None def get_lambda(self, resource_type): + """ Returns the lambda for the specified resource type + + Args: + resource_type (str): Type of resource + + Returns: + q.query: Lambda query + """ current_user_field = self.collection.get_user_field() if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] - user_ref = q.select(current_user_field, - q.select('data', q.var('old_object'))) + user_ref = q.select( + current_user_field, q.select('data', q.var('old_object'))) return q.query( - q.lambda_(lambda_args, - q.and_( - q.equals( - user_ref, - q.current_identity() - ), - q.equals( - q.select(current_user_field, q.select( - 'data', q.var('new_object'))), - q.current_identity() - ) - ) - - ) + q.lambda_( + lambda_args, + q.and_( + q.equals(user_ref, q.current_identity()), + q.equals( + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), + q.current_identity() + ) + ) + ) ) elif resource_type == 'create': lambda_args = ["new_object"] - user_ref = q.select(current_user_field, - q.select('data', q.var('new_object'))) + user_ref = q.select( + current_user_field, q.select('data', q.var('new_object'))) elif resource_type == 'read' or resource_type == 'delete': lambda_args = ["object_ref"] - user_ref = q.select(current_user_field, - q.select('data', q.get(q.var('object_ref')))) + user_ref = q.select( + current_user_field, q.select('data', q.get(q.var('object_ref')))) return q.query( - q.lambda_(lambda_args, - q.equals( - user_ref, - q.current_identity() - ) - ) + q.lambda_(lambda_args, q.equals(user_ref, q.current_identity())) ) - class GenericGroupBasedRole(GenericAuthorizationRole): + """Class for giving permissions to Group-based 
entities + """ + # Initialize the `permissions_field` variable permissions_field = 'permissions' def get_name_suffix(self): + """Get the name suffix for the group-based role + + Returns: + str: The name suffix for the group-based role + """ return f'{self.get_group_table().lower()}_based_crud_role' def get_lambda(self, resource_type): - """ Returns the lambda function for giving the permission to Group-based entities + """Returns the lambda function for giving the permission to Group-based entities - Allows modification if: - 1. You belong to the group that owns the document - 2. You have the create permission to perform the action (create, read, write, and delete) + Args: + resource_type (str): The type of operation (create, read, write, and delete) + + Returns: + Lambda: The lambda function for giving the permission to Group-based entities """ current_group_field = self.collection.get_group_field().lower() perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() + # Initialize the lambda arguments based on the `resource_type` if resource_type == 'write': group_ref = q.select(current_group_field, q.select('data', q.var('old_object'))) lambda_args = ["old_object", "new_object", "object_ref"] + elif resource_type == 'create': + lambda_args = ["new_object"] + group_ref = q.select(current_group_field, + q.select('data', q.var('new_object'))) + elif resource_type == 'read' or resource_type == 'delete': + lambda_args = ["object_ref"] + group_ref = q.select(current_group_field, + q.select('data', q.get(q.var('object_ref')))) + if resource_type == 'write': return q.query( q.lambda_(lambda_args, q.and_( @@ -355,49 +391,47 @@ def get_lambda(self, resource_type): q.select(current_group_field, q.select( 'data', q.var('new_object'))), ) - ) - ) + ) + ) ) - elif resource_type == 'create': - lambda_args = ["new_object"] - group_ref = q.select(current_group_field, - q.select('data', q.var('new_object'))) - elif resource_type == 'read' or resource_type == 'delete': - lambda_args = ["object_ref"] - group_ref = q.select(current_group_field, - q.select('data', q.get(q.var('object_ref')))) - - return q.query( - q.lambda_( - lambda_args, - q.equals( - # NOTE: After acquiring the instance of `UserGroup`, filter the result: permission field - # that matches the `perm` variable AND then see if that is equals to `perm` var - # IMPORTANT: by using this, it will easily filter permissions available, and if there were none, then it is automatically false - q.select(0, q.filter_(lambda i: q.equals(perm, i), - q.select(self.permissions_field, - q.select("data", - q.get(q.match( - q.index( - self.get_relation_index_name()), - q.current_identity(), - group_ref - )))))), - perm + else: + # Return the lambda function for giving the permission to Group-based entities + return q.query( + q.lambda_( + lambda_args, + q.equals( + # NOTE: After acquiring the instance of `UserGroup`, filter the result: permission field + # that matches the `perm` variable AND then see if that is equals to `perm` var + # IMPORTANT: by using this, it will easily filter permissions available, and if there were none, then it is automatically false + q.select(0, q.filter_(lambda i: q.equals(perm, i), + q.select(self.permissions_field, + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + q.current_identity(), + group_ref + )))))), + perm + ) ) ) - ) class GenericUserBasedRoleM2M(GenericAuthorizationRole): """ Generic set of permissions for many-to-many entity to user relationship """ def 
get_privileges(self): - """ Usage of parent `get_privileges()` with addition of access to M2M collection """ + """ + Usage of parent `get_privileges()` with addition of access to M2M collection + Returns: + List: list of privileges + """ priv_list = super().get_privileges() fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') - for k, v in fields.items(): - foreign_col = self.collection._base_properties.get(k) + for field, value in fields.items(): + # Get foreign column + foreign_col = self.collection._base_properties.get(field) relation_name = foreign_col.relation_name if relation_name: priv_list.extend([ @@ -414,10 +448,18 @@ def get_privileges(self): return priv_list def get_name_suffix(self): + """ + Returns: + String: suffix for name of the role + """ return f'{self.collection.get_user_field().lower()}_based_crud_role' - + def get_relation_index_name(self): - """ Returns the index name of the m2m index of an entity and user e.g. 'users_blogs_by_blog_and_newuser' """ + """ + Returns the index name of the m2m index of an entity and user e.g. 'users_blogs_by_blog_and_newuser' + Returns: + String: name of the index + """ user_field = self.collection.get_user_field() if user_field: user_field = user_field.lower() @@ -436,34 +478,40 @@ def get_relation_index_name(self): return relation_index_name return None - def get_lambda(self, resource_type): + """ + Returns lamda expression for the given resource type + Args: + resource_type (String): type of resource + Returns: + Lamda expression + """ current_user_field = self.collection.get_user_field() if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] obj_ref = q.var('old_object') return q.query( q.lambda_(lambda_args, - q.and_( - q.equals( - q.select(f'{self.get_user_table().lower()}ID', - q.select("data", - q.get(q.match( - q.index( - self.get_relation_index_name()), - obj_ref, - q.current_identity() - ))) - ), - q.current_identity() - ), - q.equals( - q.select(current_user_field, q.select( - 'data', q.var('new_object'))), - q.current_identity() - ) - ) - ) + q.and_( + q.equals( + q.select(f'{self.get_user_table().lower()}ID', + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), + q.current_identity() + ), + q.equals( + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), + q.current_identity() + ) + ) + ) ) elif resource_type == 'create': # Create ops will always be allowed @@ -477,15 +525,15 @@ def get_lambda(self, resource_type): lambda_args, q.equals( q.select(f'{self.get_user_table().lower()}ID', - q.select("data", - q.get(q.match( - q.index( - self.get_relation_index_name()), - obj_ref, - q.current_identity() - ))) - ), + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), q.current_identity() ) ) - ) + ) \ No newline at end of file diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index d28c852..b3152c7 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -4,8 +4,9 @@ from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group, Person, Sport +from pfunk.tests import Person, Sport from pfunk.project import Project +from pfunk.contrib.auth.collections import Group, User, UserGroups class ApiGatewayTests(unittest.TestCase): diff --git a/pfunk/tests/test_web_custom_user_group_group_perms.py 
b/pfunk/tests/test_web_custom_user_group_group_perms.py index 38274e5..cf0f057 100644 --- a/pfunk/tests/test_web_custom_user_group_group_perms.py +++ b/pfunk/tests/test_web_custom_user_group_group_perms.py @@ -9,8 +9,10 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') + userID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_group_perms.Newuser') + groupID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') class Newgroup(BaseGroup): @@ -35,8 +37,10 @@ def __unicode__(self): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.UserGroups') - group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') + user_group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_group_perms.UserGroups') + group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') @@ -89,8 +93,8 @@ def test_update(self): house.address for house in Blog.all()]) res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', json={ - "title": "updated blog", - "content": "I updated my blog."}, + "title": "updated blog", + "content": "I updated my blog."}, headers={ "Authorization": self.token}) diff --git a/pfunk/tests/test_web_custom_user_group_m2m.py b/pfunk/tests/test_web_custom_user_group_m2m.py index 3f86e4a..8e04044 100644 --- a/pfunk/tests/test_web_custom_user_group_m2m.py +++ b/pfunk/tests/test_web_custom_user_group_m2m.py @@ -11,8 +11,10 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newuser') - groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') + userID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_m2m.Newuser') + groupID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup') class Newgroup(BaseGroup): @@ -22,8 +24,10 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.UserGroups') - group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') + user_group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_m2m.UserGroups') + group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Blog', @@ -39,7 +43,7 @@ class Blog(Collection): title = StringField(required=True) content = StringField(required=True) users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser', - relation_name='users_blogs') + relation_name='users_blogs') def __unicode__(self): return self.title @@ -56,13 +60,12 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) self.user2 = Newuser.create(username='test2', email='tlasso2@example.org', first_name='Juliuz', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + 
groups=[self.group]) self.blog = Blog.create( title='test_blog', content='test content', users=[self.user], token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") - def test_read(self): res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', headers={ @@ -99,7 +102,7 @@ def test_update(self): "title": "updated blog", "content": "I updated my blog.", "users": [self.user.ref.id()] - }, + }, headers={ "Authorization": self.token}) @@ -115,3 +118,5 @@ def test_delete(self): }) self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_web_custom_user_group_users_perms.py b/pfunk/tests/test_web_custom_user_group_users_perms.py index a1b7b0c..7dcdcb4 100644 --- a/pfunk/tests/test_web_custom_user_group_users_perms.py +++ b/pfunk/tests/test_web_custom_user_group_users_perms.py @@ -4,7 +4,7 @@ from valley.utils import import_util from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup , ExtendedUser, BaseUserGroup as ug +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug from pfunk.testcase import APITestCase from pfunk import Collection, StringField, ReferenceField, ManyToManyField from pfunk.fields import ManyToManyField, StringField @@ -12,8 +12,10 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') + userID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_users_perms.Newuser') + groupID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): @@ -23,8 +25,10 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.UserGroups') - group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') + user_group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_users_perms.UserGroups') + group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Blog', @@ -111,3 +115,5 @@ def test_delete(self): }) self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) From b3f2d6134fb306155c1f8c85d4252ea76dc5cfbb Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 17 Mar 2023 14:08:30 +0800 Subject: [PATCH 120/214] updated README to show examples of custom user and group collection --- README.md | 121 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 120 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b0634bf..7143deb 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,9 @@ Includes GraphQL and generic ABAC auth workflow integrations. - [Save Some Data](#save-some-data) - [Query Your Data](#query-your-data) - [Delete a Record](#delete-a-record) - +- [Customizing your setup](#customizing-your-setup) + - [Option 1: Environment Variables](#option-1-environment-variables) + - [Option 2: Inline Field](#option-2-inline-field) ### Getting Started @@ -213,3 +215,120 @@ Let's delete the record from above. 
 ```python
 product.delete()
 ```
+
+
+
+### Customizing your setup
+This section covers customizing your `user` and `group` collections to your liking, e.g.:
+- Renaming your `User` and `Group` collections to something more descriptive for your use case, e.g. `Agent` (User) and `Firm` (Group)
+- Subclassing the `user` or `group` collection in order to have more control over what kind of auth collection you want
+
+
+### Custom User and Groups
+There are multiple ways of defining your custom user-group collections.
+Things to keep in mind:
+- The `UserGroup` class **must** be subclassed and must reference the custom `user` and `group` collections
+- Permissions work the same way with custom user and group collections. This just gives you the ability to fully customize your own auth collections.
+
+
+
+### Option 1: Environment Variables
+This is the easiest way. Just go to your `.env` file and define:
+```
+USER_COLLECTION=Newuser # Class name of your custom user class - case-sensitive!
+GROUP_COLLECTION=Newgroup # Class name of your custom group class - case-sensitive!
+GROUP_COLLECTION_DIR=dir.to.Newgroup # class dir to import your custom group
+USER_COLLECTION_DIR=dir.to.Newuser # class dir to import your custom user
+```
+Then you'll end up with this in your `collections.py`:
+```python
+from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug
+from pfunk.contrib.auth.resources import GenericUserBasedRole
+
+
+class UserGroups(ug):
+    userID = ReferenceField('dir.to.Newuser')
+    groupID = ReferenceField('dir.to.Newgroup')
+
+
+class Newgroup(BaseGroup):
+    users = ManyToManyField('dir.to.Newuser', relation_name='custom_users_groups')
+
+
+class Newuser(ExtendedUser):
+    user_group_class = import_util('dir.to.UserGroups')
+    group_class = import_util('dir.to.Newgroup')
+    groups = ManyToManyField('dir.to.Newgroup', relation_name='custom_users_groups')
+    blogs = ManyToManyField('dir.to.Blog', relation_name='users_blogs')
+
+
+class Blog(Collection):
+    collection_roles = [GenericUserBasedRole]
+    title = StringField(required=True)
+    content = StringField(required=True)
+    user = ReferenceField('dir.to.Newuser', relation_name='users_blogs')
+
+    def __unicode__(self):
+        return self.title
+
+```
+
+
+
+### Option 2: Inline Field
+If for some reason you don't want to use the environment variables, you can define the needed fields
+directly in your `Collection`. This is what we use in PFunk's unit tests; refer to them if you
+need more usage examples. Essentially:
+```python
+class Blog(Collection):
+    user_collection = 'Newuser'
+    group_collection = 'Newgroup'
+    user_collection_dir = 'dir.to.Newuser'
+    group_collection_dir = 'dir.to.Newgroup'
+    ...
+```
+
+
+Generally, this is how your `collections.py` will look in the end if you want to define
+your custom auth collections in fields. 
+ + +```python +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug +from pfunk.contrib.auth.resources import GenericUserBasedRole + + +class UserGroups(ug): + userID = ReferenceField('this.file.NewUser') + groupID = ReferenceField('this.file.Newgroup') + + +class Newgroup(BaseGroup): + users = ManyToManyField('this.file.Newuser', relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('this.file.UserGroups') + group_class = import_util('this.file.Newgroup') + groups = ManyToManyField('this.file.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('this.file.Blog', + relation_name='users_blogs') + + group_collection = 'Newgroup' + + +class Blog(Collection): + collection_roles = [GenericUserBasedRole] + title = StringField(required=True) + content = StringField(required=True) + user = ReferenceField('this.file.Newuser', relation_name='users_blogs') + + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'this.file.Newuser' + group_collection_dir = 'this.file.Newgroup' + + def __unicode__(self): + return self.title +``` + From 0788ddea0cb52eff4f2a4c8811653b51f3e19079 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 28 Mar 2023 15:10:09 +0800 Subject: [PATCH 121/214] Refactored tests to work without pre-existing files --- pfunk/tests/test_aws.py | 22 ++++++++++++++++++++++ pfunk/tests/test_email.py | 21 ++++++++++++--------- pfunk/tests/test_project.py | 8 ++++---- pfunk/tests/test_web_json_stripe.py | 16 +++++++++------- 4 files changed, 47 insertions(+), 20 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index b3152c7..b181b78 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,5 @@ import os +import json import unittest import tempfile from unittest import mock @@ -27,10 +28,31 @@ def setUpClass(cls, mocked) -> None: cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) + with open(f'pfunk.json', 'x') as f: + json.dump({ + 'name': 'test', + 'api_type': 'rest', + 'description': 'test project', + 'host': 'localhost', + 'stages': {'dev': { + 'key_module': f'test.dev_keys.KEYS', + 'fauna_secret': 'test-key', + 'bucket': 'test-bucket', + 'default_from_email': 'test@example.org' + }} + }, f, indent=4, sort_keys=True) swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] + @classmethod + def tearDownClass(cls) -> None: + os.remove("pfunk.json") + try: + os.remove('swagger.yaml') + except FileNotFoundError: + pass + def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index a841463..eb93ceb 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ -1,3 +1,4 @@ +import os import tempfile from unittest import mock @@ -22,7 +23,8 @@ def setUp(self) -> None: self.backend = EmailBackend() def test_get_template(self): - template = self.backend.get_template('email/email_template.html') + with tempfile.NamedTemporaryFile(suffix='.html') as tmp: + template = self.backend.get_template(tmp.name.split("/")[-1]) # test jinja render if no exceptions template.render(unittest_value="random value") self.assertTrue(True) # if there are no exceptions, then it is a pass @@ -57,14 +59,15 @@ def setUp(self) -> None: 
@mock.patch('boto3.client') def test_send_email(self, mocked): - res = self.SES.send_email( - subject="test", - to_emails=["testemail@email.com"], - html_template='email/email_template.html', - from_email="testFromEmail@email.com", - cc_emails=["testCCemail@email.com"], - bcc_emails=["testBCCemail@email.com"], - ) + with tempfile.NamedTemporaryFile(suffix='.html') as tmp: + res = self.SES.send_email( + subject="test", + to_emails=["testemail@email.com"], + html_template=tmp.name.split("/")[-1], + from_email="testFromEmail@email.com", + cc_emails=["testCCemail@email.com"], + bcc_emails=["testBCCemail@email.com"], + ) # if there are no exceptions, then it's a passing test self.assertTrue(True) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index f7e97e0..2665808 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -29,10 +29,10 @@ def setUp(self) -> None: def tearDown(self) -> None: os.remove("pfunk.json") - # try: - # os.remove('swagger.yaml') - # except FileNotFoundError: - # pass + try: + os.remove('swagger.yaml') + except FileNotFoundError: + pass def test_add_resource(self): self.project.add_resource(Person) diff --git a/pfunk/tests/test_web_json_stripe.py b/pfunk/tests/test_web_json_stripe.py index be45dec..59b484b 100644 --- a/pfunk/tests/test_web_json_stripe.py +++ b/pfunk/tests/test_web_json_stripe.py @@ -1,3 +1,4 @@ +import tempfile from types import SimpleNamespace from unittest import mock @@ -230,13 +231,14 @@ def test_check_ip(self): @mock.patch('boto3.client') def test_send_html_email(self, mocked): - # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file - res = self.view.send_html_email( - subject='Test Subject', - from_email='unittesting@email.com', - to_email_list=['recipient@email.com'], - template_name=('email/email_template.html') - ) + with tempfile.NamedTemporaryFile(suffix='.html') as tmp: + # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file + res = self.view.send_html_email( + subject='Test Subject', + from_email='unittesting@email.com', + to_email_list=['recipient@email.com'], + template_name=tmp.name.split("/")[-1] + ) self.assertTrue(True) # if there are no exceptions, then it passed @mock.patch('stripe.Webhook') From 851638a1e6454a60e04623ea8a2ae0f682b854c8 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 3 Apr 2023 08:11:33 +0800 Subject: [PATCH 122/214] Fixed poetry errors --- Dockerfile | 1 + docker-compose.yaml | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4b7920a..7f67ec2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,5 @@ FROM capless/capless-docker:jupyter +RUN pip install --upgrade pip COPY . 
/code RUN poetry run pip install --upgrade pip RUN poetry install diff --git a/docker-compose.yaml b/docker-compose.yaml index 94e7299..d6bad6a 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -18,7 +18,7 @@ services: - ./:/code/ env_file: .env working_dir: /code/ - command: /root/.cache/pypoetry/virtualenvs/pfunk-MATOk_fk-py3.9/bin/jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root + command: jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root fauna: restart: always diff --git a/pyproject.toml b/pyproject.toml index 5151a21..cb614bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ coverage = "^5.5" pdoc = "^7.2.0" [build-system] -requires = ["setuptools", "poetry>=0.12"] +requires = ["poetry>=0.12"] [tool.poetry.scripts] pfunk = 'pfunk.cli:pfunk' From f7c67dbc0e1654ed145911ee65c7d0b6ede58084 Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Sun, 2 Apr 2023 21:41:27 -0400 Subject: [PATCH 123/214] updated the poetry dependency list --- poetry.lock | 1343 +++++++++--------------------------------------- pyproject.toml | 1 - 2 files changed, 240 insertions(+), 1104 deletions(-) diff --git a/poetry.lock b/poetry.lock index 965a56f..b876c0f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,8 +11,8 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] trio = ["trio (>=0.16,<0.22)"] [[package]] @@ -35,8 +35,8 @@ python-versions = ">=3.6" argon2-cffi-bindings = "*" [package.extras] -dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] -docs = ["furo", "sphinx", "sphinx-notfound-page"] +dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"] +docs = ["sphinx", "sphinx-notfound-page", "furo"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] @@ -51,7 +51,7 @@ python-versions = ">=3.6" cffi = ">=1.0.1" [package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] +dev = ["pytest", "cogapp", "pre-commit", "wheel"] tests = ["pytest"] [[package]] @@ -89,7 +89,6 @@ python-versions = "*" [package.dependencies] six = ">=1.6.1,<2.0" -wheel = ">=0.23.0,<1.0" [[package]] name = "attrs" @@ -100,12 +99,12 @@ optional = false python-versions = ">=3.6" [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests_no_zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs"] +docs = ["furo", "sphinx", "myst-parser", 
"zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["attrs", "zope.interface"] +tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] +tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] [[package]] name = "backcall" @@ -117,7 +116,7 @@ python-versions = "*" [[package]] name = "beautifulsoup4" -version = "4.11.2" +version = "4.12.0" description = "Screen-scraping library" category = "dev" optional = false @@ -145,14 +144,14 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.26.89" +version = "1.26.104" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.29.89,<1.30.0" +botocore = ">=1.29.104,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -161,7 +160,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.89" +version = "1.29.104" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -231,7 +230,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 [[package]] name = "comm" -version = "0.1.2" +version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." category = "dev" optional = false @@ -241,7 +240,9 @@ python-versions = ">=3.6" traitlets = ">=5.3" [package.extras] +lint = ["black (>=22.6.0)", "mdformat-gfm (>=0.3.5)", "mdformat (>0.7)", "ruff (>=0.0.156)"] test = ["pytest"] +typing = ["mypy (>=0.990)"] [[package]] name = "coverage" @@ -266,12 +267,12 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] [[package]] name = "debugpy" @@ -306,7 +307,7 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -cli = ["Jinja2[cli] (>=3.0.3,<4.0.0)", "click[cli] (>=8.0.3,<9.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] +cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] [[package]] name = "executing" @@ -317,7 +318,7 @@ optional = false python-versions = "*" [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens", "pytest", "littleutils", "rich"] [[package]] name = "fastjsonschema" @@ -328,7 +329,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "faunadb" @@ -346,7 +347,7 
@@ requests = "*" [package.extras] lint = ["pylint"] -test = ["nose2", "nose2[coverage_plugin]"] +test = ["nose2", "nose2"] [[package]] name = "formy" @@ -472,7 +473,7 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "6.0.0" +version = "6.1.0" description = "Read metadata from Python packages" category = "dev" optional = false @@ -482,9 +483,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" @@ -498,12 +499,12 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [[package]] name = "ipykernel" -version = "6.21.3" +version = "6.22.0" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -525,15 +526,15 @@ tornado = ">=6.1" traitlets = ">=5.4.0" [package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +cov = ["coverage", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "ipython" -version = "8.11.0" +version = "8.12.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -552,11 +553,12 @@ prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", 
"typing-extensions"] +all = ["black", "ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.21)", "pandas", "trio"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -564,10 +566,10 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "trio"] [[package]] -name = "ipython_genutils" +name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" category = "dev" @@ -576,7 +578,7 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "8.0.4" +version = "8.0.6" description = "Jupyter interactive widgets" category = "dev" optional = false @@ -585,12 +587,12 @@ python-versions = ">=3.7" [package.dependencies] ipykernel = ">=4.5.1" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0,<4.0" +jupyterlab-widgets = ">=3.0.7,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0,<5.0" +widgetsnbextension = ">=4.0.7,<4.1.0" [package.extras] -test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] +test = ["jsonschema", "ipykernel", "pytest (>=3.6.0)", "pytest-cov", "pytz"] [[package]] name = "iso8601" @@ -623,12 +625,12 @@ python-versions = ">=3.6" parso = ">=0.8.0,<0.9.0" [package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "attrs", 
"colorama", "docopt", "pytest (<7.0.0)"] [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "main" @@ -701,7 +703,7 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "8.0.3" +version = "8.1.0" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false @@ -716,7 +718,7 @@ tornado = ">=6.2" traitlets = ">=5.3" [package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinx (>=4)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] @@ -742,7 +744,7 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.2.0" +version = "5.3.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false @@ -750,7 +752,7 @@ python-versions = ">=3.8" [package.dependencies] platformdirs = ">=2.5" -pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = ">=5.3" [package.extras] @@ -776,11 +778,11 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] +test = ["click", "coverage", "pre-commit", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "pytest (>=7.0)", "rich"] [[package]] name = "jupyter-server" -version = "2.4.0" +version = "2.5.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false @@ -808,7 +810,7 @@ websocket-client = "*" [package.extras] docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +test = ["ipykernel", "pre-commit", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "pytest (>=7.0)", "requests"] [[package]] name = "jupyter-server-terminals" @@ -824,7 +826,7 @@ terminado = ">=0.8.3" [package.extras] docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "jupyterlab-pygments" @@ -836,14 +838,14 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "3.0.5" +version = "3.0.7" description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false python-versions = ">=3.7" [[package]] -name = "MarkupSafe" +name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" @@ -871,7 +873,7 @@ python-versions = "*" [[package]] name = "nbclassic" -version = "0.5.3" +version = "0.5.4" description = "Jupyter Notebook as a Jupyter Server extension." 
category = "dev" optional = false @@ -897,9 +899,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "pytest-playwright", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "requests-unixsocket"] [[package]] name = "nbclient" @@ -917,12 +919,12 @@ traitlets = ">=5.3" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] -test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient", "sphinx-book-theme", "sphinx (>=1.7)"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "pytest (>=7.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" -version = "7.2.9" +version = "7.2.10" description = "Converting Jupyter Notebooks" category = "dev" optional = false @@ -947,9 +949,9 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +all = ["nbconvert"] docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["nbconvert[qtpng]"] +qtpdf = ["nbconvert"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] @@ -957,7 +959,7 @@ webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.7.3" +version = "5.8.0" description = "The Jupyter Notebook format" category = "dev" optional = false @@ -1008,9 +1010,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "requests-unixsocket"] [[package]] name = "notebook-shim" @@ -1038,9 +1040,9 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.0.0,<5.0.0" [package.extras] -isodate = ["isodate"] rfc3339-validator = ["rfc3339-validator"] strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] [[package]] name = "openapi-spec-validator" @@ -1054,7 +1056,6 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.2.0,<5.0.0" openapi-schema-validator = ">=0.2.0,<0.3.0" PyYAML = ">=5.1" -setuptools = "*" [package.extras] requests = ["requests"] @@ -1124,15 +1125,7 @@ optional = false python-versions = "*" [[package]] -name = "pip" -version = "23.0.1" -description = "The PyPA recommended tool for installing Python packages." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pkgutil_resolve_name" +name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." category = "main" @@ -1141,15 +1134,15 @@ python-versions = ">=3.6" [[package]] name = "platformdirs" -version = "3.1.1" +version = "3.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.2)"] [[package]] name = "ply" @@ -1190,7 +1183,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] [[package]] name = "ptyprocess" @@ -1220,7 +1213,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -name = "Pygments" +name = "pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" @@ -1231,7 +1224,7 @@ python-versions = ">=3.6" plugins = ["importlib-metadata"] [[package]] -name = "PyJWT" +name = "pyjwt" version = "2.6.0" description = "JSON Web Token implementation in Python" category = "main" @@ -1240,9 +1233,9 @@ python-versions = ">=3.7" [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] name = "pyrsistent" @@ -1281,7 +1274,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "305" +version = "306" description = "Python for Window Extensions" category = "dev" optional = false @@ -1296,7 +1289,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "main" @@ -1305,7 +1298,7 @@ python-versions = ">=3.6" [[package]] name = "pyzmq" -version = "25.0.0" +version = "25.0.2" description = "Python bindings for 0MQ" category = "dev" optional = false @@ -1316,7 +1309,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.4.1" +version = "5.4.2" description = "Jupyter Qt console" category = "dev" optional = false @@ -1338,8 +1331,8 @@ doc = ["Sphinx (>=1.3)"] test = ["flaky", "pytest", "pytest-qt"] [[package]] -name = "QtPy" -version = "2.3.0" +name = "qtpy" +version = "2.3.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
category = "dev" optional = false @@ -1430,7 +1423,7 @@ PyYAML = ">=3.12" valley = ">=1.5.2" [[package]] -name = "Send2Trash" +name = "send2trash" version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." category = "dev" @@ -1438,23 +1431,10 @@ optional = false python-versions = "*" [package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] +nativelib = ["pyobjc-framework-cocoa", "pywin32"] +objc = ["pyobjc-framework-cocoa"] win32 = ["pywin32"] -[[package]] -name = "setuptools" -version = "67.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -1493,7 +1473,7 @@ executing = ">=1.2.0" pure-eval = "*" [package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] +tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] [[package]] name = "stripe" @@ -1533,7 +1513,7 @@ tornado = ">=6.1.0" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +test = ["pre-commit", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "tinycss2" @@ -1547,8 +1527,8 @@ python-versions = ">=3.7" webencodings = ">=0.4" [package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest", "isort", "flake8"] [[package]] name = "tornado" @@ -1570,6 +1550,14 @@ python-versions = ">=3.7" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "uri-template" version = "1.2.0" @@ -1579,7 +1567,7 @@ optional = false python-versions = ">=3.6" [package.extras] -dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] +dev = ["mypy", "flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", 
"flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "pep8-naming"] [[package]] name = "urllib3" @@ -1590,8 +1578,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1615,12 +1603,16 @@ python-versions = "*" [[package]] name = "webcolors" -version = "1.12" -description = "A library for working with color names and color values formats defined by HTML and CSS." +version = "1.13" +description = "A library for working with the color formats defined by HTML and CSS." category = "dev" optional = false python-versions = ">=3.7" +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + [[package]] name = "webencodings" version = "0.5.1" @@ -1643,7 +1635,7 @@ optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] -name = "Werkzeug" +name = "werkzeug" version = "2.1.2" description = "The comprehensive WSGI web application library." category = "main" @@ -1653,20 +1645,9 @@ python-versions = ">=3.7" [package.extras] watchdog = ["watchdog"] -[[package]] -name = "wheel" -version = "0.38.4" -description = "A built-package format for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest (>=3.0.0)"] - [[package]] name = "widgetsnbextension" -version = "4.0.5" +version = "4.0.7" description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false @@ -1681,985 +1662,141 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "2da6450ab510552fae213960c22acc6456126a4642acf9fd9bc77062959c14f7" +content-hash = "115514b1f1229bd8bf8ae3bbf89d647aea751f1d261ebe80e7beef93315170eb" [metadata.files] -anyio = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, -] -appnope = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = 
"appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] -argon2-cffi = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] -argon2-cffi-bindings = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = 
"argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] -arrow = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, -] -asttokens = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, -] -astunparse = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] -attrs = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, - {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, -] -bleach = [ - {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, - {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, -] -boto3 = [ - {file = "boto3-1.26.89-py3-none-any.whl", hash = "sha256:09929b24aaec4951e435d53d31f800e2ca52244af049dc11e5385ce062e106e9"}, - {file = "boto3-1.26.89.tar.gz", hash = "sha256:e819812f16fab46fadf9b2853a46aaa126e108e7f038502dde555ebbbfc80133"}, -] -botocore = [ - {file = "botocore-1.29.89-py3-none-any.whl", hash = "sha256:b757e59feca82ac62934f658918133116b4535cf66f1d72ff4935fa24e522527"}, - {file = "botocore-1.29.89.tar.gz", hash = "sha256:ac8da651f73a9d5759cf5d80beba68deda407e56aaaeb10d249fd557459f3b56"}, -] -cachetools = [ - {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, - {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, -] -certifi = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash 
= "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] -charset-normalizer = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -comm = [ - {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, - {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, -] -coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = 
"coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = 
"sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, -] -cryptography = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, -] -debugpy = [ - {file = "debugpy-1.6.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0ea1011e94416e90fb3598cc3ef5e08b0a4dd6ce6b9b33ccd436c1dffc8cd664"}, - {file = "debugpy-1.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dff595686178b0e75580c24d316aa45a8f4d56e2418063865c114eef651a982e"}, - {file = "debugpy-1.6.6-cp310-cp310-win32.whl", hash = "sha256:87755e173fcf2ec45f584bb9d61aa7686bb665d861b81faa366d59808bbd3494"}, - {file = "debugpy-1.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:72687b62a54d9d9e3fb85e7a37ea67f0e803aaa31be700e61d2f3742a5683917"}, - {file = "debugpy-1.6.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:78739f77c58048ec006e2b3eb2e0cd5a06d5f48c915e2fc7911a337354508110"}, - {file = "debugpy-1.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23c29e40e39ad7d869d408ded414f6d46d82f8a93b5857ac3ac1e915893139ca"}, - {file = "debugpy-1.6.6-cp37-cp37m-win32.whl", hash = "sha256:7aa7e103610e5867d19a7d069e02e72eb2b3045b124d051cfd1538f1d8832d1b"}, - {file = "debugpy-1.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:f6383c29e796203a0bba74a250615ad262c4279d398e89d895a69d3069498305"}, - {file = "debugpy-1.6.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:23363e6d2a04d726bbc1400bd4e9898d54419b36b2cdf7020e3e215e1dcd0f8e"}, - {file = "debugpy-1.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b5d1b13d7c7bf5d7cf700e33c0b8ddb7baf030fcf502f76fc061ddd9405d16c"}, - {file = "debugpy-1.6.6-cp38-cp38-win32.whl", hash = "sha256:70ab53918fd907a3ade01909b3ed783287ede362c80c75f41e79596d5ccacd32"}, - {file = "debugpy-1.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:c05349890804d846eca32ce0623ab66c06f8800db881af7a876dc073ac1c2225"}, - {file = "debugpy-1.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771739902b1ae22a120dbbb6bd91b2cae6696c0e318b5007c5348519a4211c6"}, - {file = "debugpy-1.6.6-cp39-cp39-win32.whl", hash = "sha256:549ae0cb2d34fc09d1675f9b01942499751d174381b6082279cf19cdb3c47cbe"}, - {file = "debugpy-1.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:de4a045fbf388e120bb6ec66501458d3134f4729faed26ff95de52a754abddb1"}, - {file = 
"debugpy-1.6.6-py2.py3-none-any.whl", hash = "sha256:be596b44448aac14eb3614248c91586e2bc1728e020e82ef3197189aae556115"}, - {file = "debugpy-1.6.6.zip", hash = "sha256:b9c2130e1c632540fbf9c2c88341493797ddf58016e7cba02e311de9b0a96b67"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -defusedxml = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] -envs = [ - {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, - {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, -] -executing = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, -] -fastjsonschema = [ - {file = "fastjsonschema-2.16.3-py3-none-any.whl", hash = "sha256:04fbecc94300436f628517b05741b7ea009506ce8f946d40996567c669318490"}, - {file = "fastjsonschema-2.16.3.tar.gz", hash = "sha256:4a30d6315a68c253cfa8f963b9697246315aa3db89f98b97235e345dedfb0b8e"}, -] -faunadb = [ - {file = "faunadb-4.5.0-py2.py3-none-any.whl", hash = "sha256:5845911a3c16bc405145e16a247b1bcf67b4113822962cbfc40e1d1c6b5ac745"}, -] -formy = [ - {file = "formy-1.3.1-py3-none-any.whl", hash = "sha256:07c2a7ee351039694fe5b958ad4dfec34baeb0ffbddbf4af231609a75994e6f6"}, - {file = "formy-1.3.1.tar.gz", hash = "sha256:4ce7f79185c88f2fd896984a17e3d5cd23360db5408c7e726f64609371c0035d"}, -] -fqdn = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] -future = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, -] -graphql-py = [ - {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"}, -] -h11 = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] -h2 = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] -hpack = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] -httpcore = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, -] -httpx = [ - {file = 
"httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, - {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, -] -hyperframe = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -importlib-metadata = [ - {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, - {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, -] -importlib-resources = [ - {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, - {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, -] -ipykernel = [ - {file = "ipykernel-6.21.3-py3-none-any.whl", hash = "sha256:24ebd9715e317c185e37156ab3a87382410185230dde7aeffce389d6c7d4428a"}, - {file = "ipykernel-6.21.3.tar.gz", hash = "sha256:c8ff581905d70e7299bc1473a2f7c113bec1744fb3746d58e5b4b93bd8ee7001"}, -] -ipython = [ - {file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"}, - {file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"}, -] -ipython_genutils = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] -ipywidgets = [ - {file = "ipywidgets-8.0.4-py3-none-any.whl", hash = "sha256:ebb195e743b16c3947fe8827190fb87b4d00979c0fbf685afe4d2c4927059fa1"}, - {file = "ipywidgets-8.0.4.tar.gz", hash = "sha256:c0005a77a47d77889cafed892b58e33b4a2a96712154404c6548ec22272811ea"}, -] -iso8601 = [ - {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, - {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, -] -isoduration = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] -jedi = [ - {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, - {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, -] -Jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -jmespath = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = 
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] -jsonpointer = [ - {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, - {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, -] -jsonschema = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, -] -jupyter = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, -] -jupyter-client = [ - {file = "jupyter_client-8.0.3-py3-none-any.whl", hash = "sha256:be48ac6bd659cbbddb7a674cf06b3b8afbf53f228253cf58bde604c03bd487b0"}, - {file = "jupyter_client-8.0.3.tar.gz", hash = "sha256:ed65498bea6d876ef9d8da3e0db3dd33c5d129f5b2645f56ae03993782966bd0"}, -] -jupyter-console = [ - {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, - {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, -] -jupyter-core = [ - {file = "jupyter_core-5.2.0-py3-none-any.whl", hash = "sha256:4bdc2928c37f6917130c667d8b8708f20aee539d8283c6be72aabd2a4b4c83b0"}, - {file = "jupyter_core-5.2.0.tar.gz", hash = "sha256:1407cdb4c79ee467696c04b76633fc1884015fa109323365a6372c8e890cc83f"}, -] -jupyter-events = [ - {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, - {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, -] -jupyter-server = [ - {file = "jupyter_server-2.4.0-py3-none-any.whl", hash = "sha256:cc22792281bfb0131a728414f28ae74883b44ad6d009971aa975cae9bcc650de"}, - {file = "jupyter_server-2.4.0.tar.gz", hash = "sha256:f31f0ba2c3c44f07143bfa03fb07dd0253f857eb63f0c26f2fea955f04a49765"}, -] -jupyter-server-terminals = [ - {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, - {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, -] -jupyterlab-pygments = [ - {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, - {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, -] -jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.5-py3-none-any.whl", hash = "sha256:a04a42e50231b355b7087e16a818f541e53589f7647144ea0344c4bf16f300e5"}, - {file = "jupyterlab_widgets-3.0.5.tar.gz", hash = "sha256:eeaecdeaf6c03afc960ddae201ced88d5979b4ca9c3891bcb8f6631af705f5ef"}, -] -MarkupSafe = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = 
"MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, -] -matplotlib-inline = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] -mistune = [ - {file = "mistune-2.0.5-py2.py3-none-any.whl", hash = "sha256:bad7f5d431886fcbaf5f758118ecff70d31f75231b34024a1341120340a65ce8"}, - {file = "mistune-2.0.5.tar.gz", hash = "sha256:0246113cb2492db875c6be56974a7c893333bf26cd92891c85f63151cee09d34"}, -] -nbclassic = [ - {file = "nbclassic-0.5.3-py3-none-any.whl", hash = "sha256:e849277872d9ffd8fe4b39a8038d01ba82d6a1def9ce11b1b3c26c9546ed5131"}, - {file = "nbclassic-0.5.3.tar.gz", hash = "sha256:889772a7ba524eb781d2901f396540bcad41151e1f7e043f12ebc14a6540d342"}, -] -nbclient = [ - {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, - {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, -] -nbconvert = [ - {file = "nbconvert-7.2.9-py3-none-any.whl", hash = "sha256:495638c5e06005f4a5ce828d8a81d28e34f95c20f4384d5d7a22254b443836e7"}, - {file = "nbconvert-7.2.9.tar.gz", hash = "sha256:a42c3ac137c64f70cbe4d763111bf358641ea53b37a01a5c202ed86374af5234"}, -] -nbformat = [ - {file = "nbformat-5.7.3-py3-none-any.whl", hash = "sha256:22a98a6516ca216002b0a34591af5bcb8072ca6c63910baffc901cfa07fefbf0"}, - {file = "nbformat-5.7.3.tar.gz", hash = "sha256:4b021fca24d3a747bf4e626694033d792d594705829e5e35b14ee3369f9f6477"}, -] -nest-asyncio = [ - {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, - {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, -] -notebook = [ - {file = "notebook-6.5.3-py3-none-any.whl", hash = "sha256:50a334ad9d60b30cb759405168ef6fc3d60350ab5439fb1631544bb09dcb2cce"}, - {file = "notebook-6.5.3.tar.gz", hash = "sha256:b12bee3292211d85dd7e588a790ddce30cb3e8fbcfa1e803522a207f60819e05"}, -] -notebook-shim = [ - {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, - {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, -] -openapi-schema-validator = [ - {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, - {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, -] -openapi-spec-validator = [ - {file = "openapi-spec-validator-0.4.0.tar.gz", hash = 
"sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, - {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, -] -packaging = [ - {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, - {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, -] -pandocfilters = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, -] -parso = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] -pdoc = [ - {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] -pip = [ - {file = "pip-23.0.1-py3-none-any.whl", hash = "sha256:236bcb61156d76c4b8a05821b988c7b8c35bf0da28a4b614e8d6ab5212c25c6f"}, - {file = "pip-23.0.1.tar.gz", hash = "sha256:cd015ea1bfb0fcef59d8a286c1f8bebcb983f6317719d415dc5351efb7cd7024"}, -] -pkgutil_resolve_name = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] -platformdirs = [ - {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, - {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, -] -ply = [ - {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, - {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, -] -prometheus-client = [ - {file = "prometheus_client-0.16.0-py3-none-any.whl", hash = "sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab"}, - {file = "prometheus_client-0.16.0.tar.gz", hash = "sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, - {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, -] -psutil = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = 
"psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -pure-eval = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -Pygments = [ - {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, - {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, -] -PyJWT = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, -] -pyrsistent = [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = 
"sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -python-json-logger = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] -pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, -] -pywin32 = [ - {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, - {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, - {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, - {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, - {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, - {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, - {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, - {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, - {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, - {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, - {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, - {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, - {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, - {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, -] -pywinpty = [ - {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, - {file = "pywinpty-2.0.10-cp311-none-win_amd64.whl", hash = "sha256:7ffbd66310b83e42028fc9df7746118978d94fba8c1ebf15a7c1275fdd80b28a"}, - {file = "pywinpty-2.0.10-cp37-none-win_amd64.whl", hash = 
"sha256:38cb924f2778b5751ef91a75febd114776b3af0ae411bc667be45dd84fc881d3"}, - {file = "pywinpty-2.0.10-cp38-none-win_amd64.whl", hash = "sha256:902d79444b29ad1833b8d5c3c9aabdfd428f4f068504430df18074007c8c0de8"}, - {file = "pywinpty-2.0.10-cp39-none-win_amd64.whl", hash = "sha256:3c46aef80dd50979aff93de199e4a00a8ee033ba7a03cadf0a91fed45f0c39d7"}, - {file = "pywinpty-2.0.10.tar.gz", hash = "sha256:cdbb5694cf8c7242c2ecfaca35c545d31fa5d5814c3d67a4e628f803f680ebea"}, -] -PyYAML = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -pyzmq = [ - {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2d05d904f03ddf1e0d83d97341354dfe52244a619b5a1440a5f47a5b3451e84e"}, - {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a154ef810d44f9d28868be04641f837374a64e7449df98d9208e76c260c7ef1"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487305c2a011fdcf3db1f24e8814bb76d23bc4d2f46e145bc80316a59a9aa07d"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e7b87638ee30ab13230e37ce5331b3e730b1e0dda30120b9eeec3540ed292c8"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75243e422e85a62f0ab7953dc315452a56b2c6a7e7d1a3c3109ac3cc57ed6b47"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:31e523d067ce44a04e876bed3ff9ea1ff8d1b6636d16e5fcace9d22f8c564369"}, - {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8539216173135e9e89f6b1cc392e74e6b935b91e8c76106cf50e7a02ab02efe5"}, - {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2754fa68da08a854f4816e05160137fa938a2347276471103d31e04bcee5365c"}, - {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1bc30f0c18444d51e9b0d0dd39e3a4e7c53ee74190bebef238cd58de577ea9"}, - {file = "pyzmq-25.0.0-cp310-cp310-win32.whl", hash = 
"sha256:01d53958c787cfea34091fcb8ef36003dbb7913b8e9f8f62a0715234ebc98b70"}, - {file = "pyzmq-25.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:58fc3ad5e1cfd2e6d24741fbb1e216b388115d31b0ca6670f894187f280b6ba6"}, - {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4bba04ea779a3d7ef25a821bb63fd0939142c88e7813e5bd9c6265a20c523a2"}, - {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af1fbfb7ad6ac0009ccee33c90a1d303431c7fb594335eb97760988727a37577"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85456f0d8f3268eecd63dede3b99d5bd8d3b306310c37d4c15141111d22baeaf"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0645b5a2d2a06fd8eb738018490c514907f7488bf9359c6ee9d92f62e844b76f"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f72ea279b2941a5203e935a4588b9ba8a48aeb9a926d9dfa1986278bd362cb8"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:4e295f7928a31ae0f657e848c5045ba6d693fe8921205f408ca3804b1b236968"}, - {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ac97e7d647d5519bcef48dd8d3d331f72975afa5c4496c95f6e854686f45e2d9"}, - {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:656281d496aaf9ca4fd4cea84e6d893e3361057c4707bd38618f7e811759103c"}, - {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f6116991568aac48b94d6d8aaed6157d407942ea385335a6ed313692777fb9d"}, - {file = "pyzmq-25.0.0-cp311-cp311-win32.whl", hash = "sha256:0282bba9aee6e0346aa27d6c69b5f7df72b5a964c91958fc9e0c62dcae5fdcdc"}, - {file = "pyzmq-25.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:526f884a27e8bba62fe1f4e07c62be2cfe492b6d432a8fdc4210397f8cf15331"}, - {file = "pyzmq-25.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ccb3e1a863222afdbda42b7ca8ac8569959593d7abd44f5a709177d6fa27d266"}, - {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4046d03100aca266e70d54a35694cb35d6654cfbef633e848b3c4a8d64b9d187"}, - {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3100dddcada66ec5940ed6391ebf9d003cc3ede3d320748b2737553019f58230"}, - {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7877264aa851c19404b1bb9dbe6eed21ea0c13698be1eda3784aab3036d1c861"}, - {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5049e75cc99db65754a3da5f079230fb8889230cf09462ec972d884d1704a3ed"}, - {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:81f99fb1224d36eb91557afec8cdc2264e856f3464500b55749020ce4c848ef2"}, - {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a1cd4a95f176cdc0ee0a82d49d5830f13ae6015d89decbf834c273bc33eeb3d3"}, - {file = "pyzmq-25.0.0-cp36-cp36m-win32.whl", hash = "sha256:926236ca003aec70574754f39703528947211a406f5c6c8b3e50eca04a9e87fc"}, - {file = "pyzmq-25.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:94f0a7289d0f5c80807c37ebb404205e7deb737e8763eb176f4770839ee2a287"}, - {file = "pyzmq-25.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f3f96d452e9580cb961ece2e5a788e64abaecb1232a80e61deffb28e105ff84a"}, - {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:930e6ad4f2eaac31a3d0c2130619d25db754b267487ebc186c6ad18af2a74018"}, - {file = 
"pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1081d7030a1229c8ff90120346fb7599b54f552e98fcea5170544e7c6725aab"}, - {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:531866c491aee5a1e967c286cfa470dffac1e2a203b1afda52d62b58782651e9"}, - {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fc7c1421c5b1c916acf3128bf3cc7ea7f5018b58c69a6866d70c14190e600ce9"}, - {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9a2d5e419bd39a1edb6cdd326d831f0120ddb9b1ff397e7d73541bf393294973"}, - {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:183e18742be3621acf8908903f689ec520aee3f08449bfd29f583010ca33022b"}, - {file = "pyzmq-25.0.0-cp37-cp37m-win32.whl", hash = "sha256:02f5cb60a7da1edd5591a15efa654ffe2303297a41e1b40c3c8942f8f11fc17c"}, - {file = "pyzmq-25.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cac602e02341eaaf4edfd3e29bd3fdef672e61d4e6dfe5c1d065172aee00acee"}, - {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:e14df47c1265356715d3d66e90282a645ebc077b70b3806cf47efcb7d1d630cb"}, - {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:293a7c2128690f496057f1f1eb6074f8746058d13588389981089ec45d8fdc77"}, - {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:731b208bc9412deeb553c9519dca47136b5a01ca66667cafd8733211941b17e4"}, - {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b055a1cddf8035966ad13aa51edae5dc8f1bba0b5d5e06f7a843d8b83dc9b66b"}, - {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17e1cb97d573ea84d7cd97188b42ca6f611ab3ee600f6a75041294ede58e3d20"}, - {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:60ecbfe7669d3808ffa8a7dd1487d6eb8a4015b07235e3b723d4b2a2d4de7203"}, - {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c25c95416133942280faaf068d0fddfd642b927fb28aaf4ab201a738e597c1e"}, - {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be05504af0619d1cffa500af1e0ede69fb683f301003851f5993b5247cc2c576"}, - {file = "pyzmq-25.0.0-cp38-cp38-win32.whl", hash = "sha256:6bf3842af37af43fa953e96074ebbb5315f6a297198f805d019d788a1021dbc8"}, - {file = "pyzmq-25.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b90bb8dfbbd138558f1f284fecfe328f7653616ff9a972433a00711d9475d1a9"}, - {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:62b9e80890c0d2408eb42d5d7e1fc62a5ce71be3288684788f74cf3e59ffd6e2"}, - {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484c2c4ee02c1edc07039f42130bd16e804b1fe81c4f428e0042e03967f40c20"}, - {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9ca6db34b26c4d3e9b0728841ec9aa39484eee272caa97972ec8c8e231b20c7e"}, - {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:610d2d112acd4e5501fac31010064a6c6efd716ceb968e443cae0059eb7b86de"}, - {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3594c0ff604e685d7e907860b61d0e10e46c74a9ffca168f6e9e50ea934ee440"}, - {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c21a5f4e54a807df5afdef52b6d24ec1580153a6bcf0607f70a6e1d9fa74c5c3"}, - {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:4725412e27612f0d7d7c2f794d89807ad0227c2fc01dd6146b39ada49c748ef9"}, - {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d3d604fe0a67afd1aff906e54da557a5203368a99dcc50a70eef374f1d2abef"}, - {file = "pyzmq-25.0.0-cp39-cp39-win32.whl", hash = "sha256:3670e8c5644768f214a3b598fe46378a4a6f096d5fb82a67dfd3440028460565"}, - {file = "pyzmq-25.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e99629a976809fe102ef73e856cf4b2660acd82a412a51e80ba2215e523dfd0a"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:66509c48f7446b640eeae24b60c9c1461799a27b1b0754e438582e36b5af3315"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c464cc508177c09a5a6122b67f978f20e2954a21362bf095a0da4647e3e908"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:28bcb2e66224a7ac2843eb632e4109d6b161479e7a2baf24e37210461485b4f1"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e7ef9ac807db50b4eb6f534c5dcc22f998f5dae920cc28873d2c1d080a4fc9"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5050f5c50b58a6e38ccaf9263a356f74ef1040f5ca4030225d1cb1a858c5b7b6"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a73af6504e0d2805e926abf136ebf536735a13c22f709be7113c2ec65b4bec3"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0e8d00228db627ddd1b418c7afd81820b38575f237128c9650365f2dd6ac3443"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5605621f2181f20b71f13f698944deb26a0a71af4aaf435b34dd90146092d530"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6136bfb0e5a9cf8c60c6ac763eb21f82940a77e6758ea53516c8c7074f4ff948"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0a90b2480a26aef7c13cff18703ba8d68e181facb40f78873df79e6d42c1facc"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00c94fd4c9dd3c95aace0c629a7fa713627a5c80c1819326b642adf6c4b8e2a2"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20638121b0bdc80777ce0ec8c1f14f1ffec0697a1f88f0b564fa4a23078791c4"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f75b4b8574f3a8a0d6b4b52606fc75b82cb4391471be48ab0b8677c82f9ed4"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cbb885f347eba7ab7681c450dee5b14aed9f153eec224ec0c3f299273d9241f"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c48f257da280b3be6c94e05bd575eddb1373419dbb1a72c3ce64e88f29d1cd6d"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:866eabf7c1315ef2e93e34230db7cbf672e0d7c626b37c11f7e870c8612c3dcc"}, - {file = "pyzmq-25.0.0.tar.gz", hash = "sha256:f330a1a2c7f89fd4b0aa4dcb7bf50243bf1c8da9a2f1efc31daf57a2046b31f2"}, -] -qtconsole = [ - {file = "qtconsole-5.4.1-py3-none-any.whl", hash = "sha256:bae8c7e10170cdcdcaf7e6d53ad7d6a7412249b9b8310a0eaa6b6f3b260f32db"}, - {file = "qtconsole-5.4.1.tar.gz", hash = "sha256:f67a03f40f722e13261791280f73068dbaf9dafcc335cbba644ccc8f892640e5"}, -] -QtPy = [ - {file = "QtPy-2.3.0-py3-none-any.whl", hash = 
"sha256:8d6d544fc20facd27360ea189592e6135c614785f0dec0b4f083289de6beb408"}, - {file = "QtPy-2.3.0.tar.gz", hash = "sha256:0603c9c83ccc035a4717a12908bf6bc6cb22509827ea2ec0e94c2da7c9ed57c5"}, -] -requests = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, -] -rfc3339-validator = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] -rfc3986 = [ - {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, - {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, -] -rfc3986-validator = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] -s3transfer = [ - {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, - {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, -] -sammy = [ - {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, - {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, -] -Send2Trash = [ - {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, - {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, -] -setuptools = [ - {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, - {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sniffio = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] -soupsieve = [ - {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, - {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, -] -stack-data = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, -] -stripe = [ - {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = 
"sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, - {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, -] -swaggyp = [ - {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, - {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, -] -terminado = [ - {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, - {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, -] -tinycss2 = [ - {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, - {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, -] -tornado = [ - {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, - {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, - {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, - {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, - {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, -] -traitlets = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, -] -uri-template = [ - {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, - {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, -] -urllib3 = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, -] -valley = [ - {file = "valley-1.5.8-py3-none-any.whl", hash = 
"sha256:c30c0bdb30e5be561dd4332281fc53315c4c34f174d268d8cc7496a6f47ee314"}, - {file = "valley-1.5.8.tar.gz", hash = "sha256:88342fa4af854b8e9e426776995c9c2c690b432ea35c0c9529fa0abb62e553e3"}, -] -wcwidth = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, -] -webcolors = [ - {file = "webcolors-1.12-py3-none-any.whl", hash = "sha256:d98743d81d498a2d3eaf165196e65481f0d2ea85281463d856b1e51b09f62dce"}, - {file = "webcolors-1.12.tar.gz", hash = "sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9"}, -] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] -websocket-client = [ - {file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"}, - {file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"}, -] -Werkzeug = [ - {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, - {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, -] -wheel = [ - {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, - {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, -] -widgetsnbextension = [ - {file = "widgetsnbextension-4.0.5-py3-none-any.whl", hash = "sha256:eaaaf434fb9b08bd197b2a14ffe45ddb5ac3897593d43c69287091e5f3147bf7"}, - {file = "widgetsnbextension-4.0.5.tar.gz", hash = "sha256:003f716d930d385be3fd9de42dd9bf008e30053f73bddde235d14fbeaeff19af"}, -] -zipp = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, -] +anyio = [] +appnope = [] +argon2-cffi = [] +argon2-cffi-bindings = [] +arrow = [] +asttokens = [] +astunparse = [] +attrs = [] +backcall = [] +beautifulsoup4 = [] +bleach = [] +boto3 = [] +botocore = [] +cachetools = [] +certifi = [] +cffi = [] +charset-normalizer = [] +click = [] +colorama = [] +comm = [] +coverage = [] +cryptography = [] +debugpy = [] +decorator = [] +defusedxml = [] +envs = [] +executing = [] +fastjsonschema = [] +faunadb = [] +formy = [] +fqdn = [] +future = [] +graphql-py = [] +h11 = [] +h2 = [] +hpack = [] +httpcore = [] +httpx = [] +hyperframe = [] +idna = [] +importlib-metadata = [] +importlib-resources = [] +ipykernel = [] +ipython = [] +ipython-genutils = [] +ipywidgets = [] +iso8601 = [] +isoduration = [] +jedi = [] +jinja2 = [] +jmespath = [] +jsonpointer = [] +jsonschema = [] +jupyter = [] +jupyter-client = [] +jupyter-console = [] +jupyter-core = [] +jupyter-events = [] +jupyter-server = [] +jupyter-server-terminals = [] +jupyterlab-pygments = [] +jupyterlab-widgets = [] +markupsafe = [] +matplotlib-inline = [] +mistune = [] +nbclassic = [] +nbclient = [] +nbconvert = [] +nbformat = [] +nest-asyncio = [] +notebook = [] +notebook-shim = [] 
+openapi-schema-validator = [] +openapi-spec-validator = [] +packaging = [] +pandocfilters = [] +parso = [] +pdoc = [] +pexpect = [] +pickleshare = [] +pkgutil-resolve-name = [] +platformdirs = [] +ply = [] +prometheus-client = [] +prompt-toolkit = [] +psutil = [] +ptyprocess = [] +pure-eval = [] +pycparser = [] +pygments = [] +pyjwt = [] +pyrsistent = [] +python-dateutil = [] +python-json-logger = [] +pytz = [] +pywin32 = [] +pywinpty = [] +pyyaml = [] +pyzmq = [] +qtconsole = [] +qtpy = [] +requests = [] +rfc3339-validator = [] +rfc3986 = [] +rfc3986-validator = [] +s3transfer = [] +sammy = [] +send2trash = [] +six = [] +sniffio = [] +soupsieve = [] +stack-data = [] +stripe = [] +swaggyp = [] +terminado = [] +tinycss2 = [] +tornado = [] +traitlets = [] +typing-extensions = [] +uri-template = [] +urllib3 = [] +valley = [] +wcwidth = [] +webcolors = [] +webencodings = [] +websocket-client = [] +werkzeug = [] +widgetsnbextension = [] +zipp = [] diff --git a/pyproject.toml b/pyproject.toml index cb614bc..4c5e00d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,6 @@ license = "Apache-2.0" python = "^3.8" faunadb = "^4.0.1" valley = "1.5.8" -envs = "^1.3" requests = "^2.23.0" pytz = "^2021.1" decorator = "^5.0.9" From 1763880a853668da3ed8c1662d4d75ebbc4a093e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 3 Apr 2023 09:44:32 +0800 Subject: [PATCH 124/214] reverted docker-compose file --- docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index d6bad6a..94e7299 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -18,7 +18,7 @@ services: - ./:/code/ env_file: .env working_dir: /code/ - command: jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root + command: /root/.cache/pypoetry/virtualenvs/pfunk-MATOk_fk-py3.9/bin/jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root fauna: restart: always From 9d3f9f8353082174ec2cacb9b39c6ea5b96a8630 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 3 Apr 2023 10:02:02 +0800 Subject: [PATCH 125/214] Updated tests to use local html file instead of tempfile --- pfunk/testcase.py | 2 +- pfunk/tests/test_email.py | 22 +++++++++++----------- pfunk/tests/test_web_json_stripe.py | 15 +++++++-------- 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/pfunk/testcase.py b/pfunk/testcase.py index dac644e..1b20b80 100644 --- a/pfunk/testcase.py +++ b/pfunk/testcase.py @@ -14,7 +14,7 @@ class PFunkTestCase(unittest.TestCase): def setUp(self) -> None: os.environ['PFUNK_TEST_MODE'] = 'True' - os.environ['TEMPLATE_ROOT_DIR'] = '/tmp' + os.environ['TEMPLATE_ROOT_DIR'] = '/' self.client = FaunaClient(secret='secret') self.db_name = str(uuid.uuid4()) self.client.query( diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index eb93ceb..b885441 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ -15,6 +15,7 @@ class TestEmailBackend(APITestCase): collections = [User, Group, UserGroups] def setUp(self) -> None: + # NOTE: env var TEMPLATE_ROOT_DIR should be set to "/" super(TestEmailBackend, self).setUp() self.group = Group.create(name='Power Users', slug='power-users') self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', @@ -23,8 +24,8 @@ def setUp(self) -> None: self.backend = EmailBackend() def test_get_template(self): - with tempfile.NamedTemporaryFile(suffix='.html') as tmp: - template = self.backend.get_template(tmp.name.split("/")[-1]) + template = 
self.backend.get_template( + '/code/pfunk/tests/templates/email/email_template.html') # test jinja render if no exceptions template.render(unittest_value="random value") self.assertTrue(True) # if there are no exceptions, then it is a pass @@ -59,15 +60,14 @@ def setUp(self) -> None: @mock.patch('boto3.client') def test_send_email(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.html') as tmp: - res = self.SES.send_email( - subject="test", - to_emails=["testemail@email.com"], - html_template=tmp.name.split("/")[-1], - from_email="testFromEmail@email.com", - cc_emails=["testCCemail@email.com"], - bcc_emails=["testBCCemail@email.com"], - ) + res = self.SES.send_email( + subject="test", + to_emails=["testemail@email.com"], + html_template='code/pfunk/tests/templates/email/email_template.html', + from_email="testFromEmail@email.com", + cc_emails=["testCCemail@email.com"], + bcc_emails=["testBCCemail@email.com"], + ) # if there are no exceptions, then it's a passing test self.assertTrue(True) diff --git a/pfunk/tests/test_web_json_stripe.py b/pfunk/tests/test_web_json_stripe.py index 59b484b..1b7dcc5 100644 --- a/pfunk/tests/test_web_json_stripe.py +++ b/pfunk/tests/test_web_json_stripe.py @@ -231,14 +231,13 @@ def test_check_ip(self): @mock.patch('boto3.client') def test_send_html_email(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.html') as tmp: - # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file - res = self.view.send_html_email( - subject='Test Subject', - from_email='unittesting@email.com', - to_email_list=['recipient@email.com'], - template_name=tmp.name.split("/")[-1] - ) + # Requires to have `TEMPLATE_ROOT_DIR=/` in your .env file + res = self.view.send_html_email( + subject='Test Subject', + from_email='unittesting@email.com', + to_email_list=['recipient@email.com'], + template_name='/code/pfunk/tests/templates/email/email_template.html' + ) self.assertTrue(True) # if there are no exceptions, then it passed @mock.patch('stripe.Webhook') From 09daeed2a0392064aa9a179f867258c8e40bdfc0 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:08:35 +0800 Subject: [PATCH 126/214] Added aws.py as a skeleton for upcoming AWS features --- pfunk/utils/aws.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 pfunk/utils/aws.py diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py new file mode 100644 index 0000000..7413120 --- /dev/null +++ b/pfunk/utils/aws.py @@ -0,0 +1,36 @@ +import boto3 +import swaggyp as sw + +class ApiGateway(object): + + def __init__(self): + self.client = boto3.client('apigateway') + pass + + def create_api_from_yaml(self, yaml_file): + # response = client.import_rest_api( + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def update_api_from_yaml(self, yaml_file): + # response = client.put_rest_api( + # restApiId='string', + # mode='merge'|'overwrite', + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using """ + pass + + def \ No newline at end of file From 0cf510b112e80c5247a633c3e79bbf706ba37733 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:11:41 +0800 Subject: [PATCH 127/214] Added skeleton tests for aws features --- pfunk/tests/test_aws.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 
pfunk/tests/test_aws.py

diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py
new file mode 100644
index 0000000..c3cdc45
--- /dev/null
+++ b/pfunk/tests/test_aws.py
@@ -0,0 +1,20 @@
+import unittest
+
+from pfunk.utils.aws import ApiGateway
+from pfunk.tests import User, Group
+from pfunk.project import Project
+
+
+class ApiGatewayTests(unittest.TestCase):
+
+    def setUp(self) -> None:
+        self.project = Project()
+
+    def test_validate_yaml(self):
+        pass
+
+    def test_create_api_from_yaml(self):
+        pass
+
+    def test_update_api_from_yaml(self):
+        pass

From 8cf501fda204b5ec4dc7634b56db17a21800c27f Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Thu, 7 Apr 2022 15:29:22 +0800
Subject: [PATCH 128/214] Added yaml validator and have it return specific errors

---
 pfunk/tests/test_aws.py |  34 +-
 pfunk/utils/aws.py      |  21 +-
 pfunk/utils/swagger.py  |   1 +
 poetry.lock             | 955 +++++++++++++++++++++++++++++++++++-----
 pyproject.toml          |   4 +-
 5 files changed, 881 insertions(+), 134 deletions(-)

diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py
index c3cdc45..a6b1314 100644
--- a/pfunk/tests/test_aws.py
+++ b/pfunk/tests/test_aws.py
@@ -1,20 +1,42 @@
 import unittest
+from unittest import mock
 
 from pfunk.utils.aws import ApiGateway
-from pfunk.tests import User, Group
+from pfunk.tests import User, Group, Person, Sport
 from pfunk.project import Project
 
 
 class ApiGatewayTests(unittest.TestCase):
 
-    def setUp(self) -> None:
-        self.project = Project()
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.project = Project()
+        cls.aws_client = ApiGateway()
+        cls.project.add_resources([Person, Sport, Group, User])
+        cls.api_yaml = cls.project.generate_swagger()
 
     def test_validate_yaml(self):
-        pass
+        result = self.aws_client.validate_yaml(self.api_yaml)
+        self.assertIsNone(result)  # if there are no errors, then spec is valid
 
+    def test_validate_wrong_yaml(self):
+        result = self.aws_client.validate_yaml('wrong yaml...33::39')
+        self.assertIsNotNone(result)  # if there are returned objs, there is an error
+
+    @mock.patch('boto3.client')
     def test_create_api_from_yaml(self):
-        pass
+        result = self.aws_client.create_api_from_yaml(self.api_yaml)
+        self.assertTrue(result['success'])
 
+    @mock.patch('boto3.client')
     def test_update_api_from_yaml(self):
-        pass
+        result = self.aws_client.update_api_from_yaml(self.api_yaml)
+        self.assertTrue(result['success'])
+
+    def test_create_api_from_wrong_yaml(self):
+        result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas')
+        self.assertEqual(result, 'Bad Request. YAML is not valid.')
+
+    def test_update_api_from_wrong_yaml(self):
+        result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas')
+        self.assertEqual(result, 'Bad Request. YAML is not valid.')
\ No newline at end of file
diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py
index 7413120..4c61506 100644
--- a/pfunk/utils/aws.py
+++ b/pfunk/utils/aws.py
@@ -1,5 +1,9 @@
 import boto3
 import swaggyp as sw
+from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator
+from openapi_spec_validator.readers import read_from_filename
+from openapi_spec_validator.exceptions import OpenAPIValidationError
+
 
 class ApiGateway(object):
 
@@ -7,6 +11,17 @@ def __init__(self):
         self.client = boto3.client('apigateway')
         pass
 
+    def validate_yaml(self, yaml_file):
+        """ Validate YAML file if it is valid for using OpenAPI Spec v2"""
+        try:
+            spec_dict, spec_url = read_from_filename(yaml_file)
+            validate_v2_spec(spec_dict)
+        except OpenAPIValidationError as err:
+            errors = [{err.message: err.json_path}
+                      for err in openapi_v2_spec_validator.iter_errors(spec_dict)]
+            return errors
+        return None
+
     def create_api_from_yaml(self, yaml_file):
         # response = client.import_rest_api(
         #     failOnWarnings=True|False,
@@ -28,9 +43,3 @@ def update_api_from_yaml(self, yaml_file):
         #     body=b'bytes'|file
         # )
         pass
-
-    def validate_yaml(self, yaml_file):
-        """ Validate YAML file if it is valid for using """
-        pass
-
-    def
\ No newline at end of file
diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py
index 0a151f3..3f40ad1 100644
--- a/pfunk/utils/swagger.py
+++ b/pfunk/utils/swagger.py
@@ -110,6 +110,7 @@ def write_to_yaml(self):
         if not os.path.exists(f'swagger.yaml'):
             with open(f'swagger.yaml', 'x') as swag_doc:
                 swag_doc.write(t.to_yaml())
+            return t.to_yaml()
         else:
             print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...')
             print(t.to_yaml())
diff --git a/poetry.lock b/poetry.lock
index d898edf..66ee101 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -83,7 +83,7 @@ six = ">=1.6.1,<2.0"
 name = "attrs"
 version = "22.1.0"
 description = "Classes Without Boilerplate"
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.5"
 
@@ -433,7 +433,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8
 name = "importlib-resources"
 version = "5.10.0"
 description = "Read resources from Python packages"
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 
@@ -578,7 +578,7 @@ python-versions = ">=3.7"
 name = "jsonschema"
 version = "4.16.0"
 description = "An implementation of JSON Schema validation for Python"
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 
@@ -889,6 +889,38 @@ jupyter-server = ">=1.8,<3"
 [package.extras]
 test = ["pytest-tornasync", "pytest-console-scripts", "pytest"]
 
+[[package]]
+name = "openapi-schema-validator"
+version = "0.2.3"
+description = "OpenAPI schema validation for Python"
+category = "main"
+optional = false
+python-versions = ">=3.7.0,<4.0.0"
+
+[package.dependencies]
+jsonschema = ">=3.0.0,<5.0.0"
+
+[package.extras]
+rfc3339-validator = ["rfc3339-validator"]
+strict-rfc3339 = ["strict-rfc3339"]
+isodate = ["isodate"]
+
+[[package]]
+name = "openapi-spec-validator"
+version = "0.4.0"
+description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator"
+category = "main"
+optional = false
+python-versions = ">=3.7.0,<4.0.0"
+
+[package.dependencies]
+jsonschema = ">=3.2.0,<5.0.0"
+openapi-schema-validator = ">=0.2.0,<0.3.0"
+PyYAML = ">=5.1"
+
+[package.extras]
+requests = ["requests"]
+
 [[package]]
 name = "packaging"
 version = "21.3"
@@ -1080,7 +1112,7 @@ diagrams = ["railroad-diagrams", "jinja2"]
 name = 
"pyrsistent" version = "0.18.1" description = "Persistent/Functional/Immutable data structures" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1426,7 +1458,7 @@ python-versions = ">=3.7" name = "zipp" version = "3.9.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1437,119 +1469,804 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6808ad24f73c3549501769eb7c5dca32503dc360738549cc7fdfc63847f9a38c" +content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" [metadata.files] -anyio = [] -appnope = [] -argon2-cffi = [] -argon2-cffi-bindings = [] -asttokens = [] -astunparse = [] -attrs = [] -backcall = [] -beautifulsoup4 = [] -bleach = [] -boto3 = [] -botocore = [] -cachetools = [] -certifi = [] -cffi = [] -charset-normalizer = [] -click = [] -colorama = [] -coverage = [] -cryptography = [] -debugpy = [] -decorator = [] -defusedxml = [] -entrypoints = [] -envs = [] -executing = [] -fastjsonschema = [] -faunadb = [] -formy = [] -future = [] -graphql-py = [] -h2 = [] -hpack = [] -hyper = [] -hyperframe = [] -idna = [] -importlib-metadata = [] -importlib-resources = [] -ipykernel = [] -ipython = [] -ipython-genutils = [] -ipywidgets = [] -iso8601 = [] -jedi = [] -jinja2 = [] -jmespath = [] -jsonschema = [] -jupyter = [] -jupyter-client = [] -jupyter-console = [] -jupyter-core = [] -jupyter-server = [] -jupyterlab-pygments = [] -jupyterlab-widgets = [] -markupsafe = [] -matplotlib-inline = [] -mistune = [] -nbclassic = [] -nbclient = [] -nbconvert = [] -nbformat = [] -nest-asyncio = [] -notebook = [] -notebook-shim = [] -packaging = [] -pandocfilters = [] -parso = [] -pdoc = [] -pexpect = [] -pickleshare = [] -pkgutil-resolve-name = [] -ply = [] -prometheus-client = [] -prompt-toolkit = [] -psutil = [] -ptyprocess = [] -pure-eval = [] -py = [] -pycparser = [] -pygments = [] -pyjwt = [] -pyparsing = [] -pyrsistent = [] -python-dateutil = [] -pytz = [] -pywin32 = [] -pywinpty = [] -pyyaml = [] -pyzmq = [] -qtconsole = [] -qtpy = [] -requests = [] -s3transfer = [] -sammy = [] -send2trash = [] -six = [] -sniffio = [] -soupsieve = [] -stack-data = [] -stripe = [] -swaggyp = [] -terminado = [] -tinycss2 = [] -tornado = [] -traitlets = [] -urllib3 = [] -valley = [] -wcwidth = [] -webencodings = [] -websocket-client = [] -werkzeug = [] -widgetsnbextension = [] -zipp = [] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] +argon2-cffi = [ + {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] +argon2-cffi-bindings = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] +asttokens = [ + {file = "asttokens-2.0.5-py2.py3-none-any.whl", hash = "sha256:0844691e88552595a6f4a4281a9f7f79b8dd45ca4ccea82e5e05b4bbdb76705c"}, + {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, +] +astunparse = [ + {file = 
"astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] +bleach = [ + {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, + {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, +] +boto3 = [ + {file = "boto3-1.23.8-py3-none-any.whl", hash = "sha256:15733c2bbedce7a36fcf1749560c72c3ee90785aa6302a98658c7bffdcbe1f2a"}, + {file = "boto3-1.23.8.tar.gz", hash = "sha256:ea8ebcea4ccb70d1cf57526d9eec6012c76796f28ada3e9cc1d89178683d8107"}, +] +botocore = [ + {file = "botocore-1.26.8-py3-none-any.whl", hash = "sha256:620851daf1245af5bc28137aa821375bac964aa0eddc482437c783fe01e298fc"}, + {file = "botocore-1.26.8.tar.gz", hash = "sha256:e786722cb14de7319331cc55e9092174de66a768559700ef656d05ff41b3e24f"}, +] +cachetools = [ + {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, + {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, +] +certifi = [ + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, +] +cffi = [ + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = 
"cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + 
{file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = 
"colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = 
"coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = 
"sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] +cryptography = [ + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, +] +debugpy = [ + {file = "debugpy-1.6.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:eb1946efac0c0c3d411cea0b5ac772fbde744109fd9520fb0c5a51979faf05ad"}, + {file = 
"debugpy-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e3513399177dd37af4c1332df52da5da1d0c387e5927dc4c0709e26ee7302e8f"}, + {file = "debugpy-1.6.0-cp310-cp310-win32.whl", hash = "sha256:5c492235d6b68f879df3bdbdb01f25c15be15682665517c2c7d0420e5658d71f"}, + {file = "debugpy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:40de9ba137d355538432209d05e0f5fe5d0498dce761c39119ad4b950b51db31"}, + {file = "debugpy-1.6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0d383b91efee57dbb923ba20801130cf60450a0eda60bce25bccd937de8e323a"}, + {file = "debugpy-1.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ff853e60e77e1c16f85a31adb8360bb2d98ca588d7ed645b7f0985b240bdb5e"}, + {file = "debugpy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:8e972c717d95f56b6a3a7a29a5ede1ee8f2c3802f6f0e678203b0778eb322bf1"}, + {file = "debugpy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a8aaeb53e87225141fda7b9081bd87155c1debc13e2f5a532d341112d1983b65"}, + {file = "debugpy-1.6.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:132defb585b518955358321d0f42f6aa815aa15b432be27db654807707c70b2f"}, + {file = "debugpy-1.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ee75844242b4537beb5899f3e60a578454d1f136b99e8d57ac424573797b94a"}, + {file = "debugpy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:a65a2499761d47df3e9ea9567109be6e73d412e00ac3ffcf74839f3ddfcdf028"}, + {file = "debugpy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:bd980d533d0ddfc451e03a3bb32acb2900049fec39afc3425b944ebf0889be62"}, + {file = "debugpy-1.6.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:245c7789a012f86210847ec7ee9f38c30a30d4c2223c3e111829a76c9006a5d0"}, + {file = "debugpy-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e3aa2368883e83e7b689ddff3cafb595f7b711f6a065886b46a96a7fef874e7"}, + {file = "debugpy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:72bcfa97f3afa0064afc77ab811f48ad4a06ac330f290b675082c24437730366"}, + {file = "debugpy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:30abefefd2ff5a5481162d613cb70e60e2fa80a5eb4c994717c0f008ed25d2e1"}, + {file = "debugpy-1.6.0-py2.py3-none-any.whl", hash = "sha256:4de7777842da7e08652f2776c552070bbdd758557fdec73a15d7be0e4aab95ce"}, + {file = "debugpy-1.6.0.zip", hash = "sha256:7b79c40852991f7b6c3ea65845ed0f5f6b731c37f4f9ad9c61e2ab4bd48a9275"}, +] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] +entrypoints = [ + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, +] +envs = [ + {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, + {file = "envs-1.4.tar.gz", hash = 
"sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, +] +executing = [ + {file = "executing-0.8.3-py2.py3-none-any.whl", hash = "sha256:d1eef132db1b83649a3905ca6dd8897f71ac6f8cac79a7e58a1a09cf137546c9"}, + {file = "executing-0.8.3.tar.gz", hash = "sha256:c6554e21c6b060590a6d3be4b82fb78f8f0194d809de5ea7df1c093763311501"}, +] +fastjsonschema = [ + {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, + {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, +] +faunadb = [ + {file = "faunadb-4.2.0-py2.py3-none-any.whl", hash = "sha256:73d5f560bddb7fc45f9201d526f97fcd0a7b0ef74ce3d4d46b8e116f4ce6e219"}, +] +future = [ + {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, +] +graphql-py = [ + {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"}, +] +h2 = [ + {file = "h2-2.6.2-py2.py3-none-any.whl", hash = "sha256:93cbd1013a2218539af05cdf9fc37b786655b93bbc94f5296b7dabd1c5cadf41"}, + {file = "h2-2.6.2.tar.gz", hash = "sha256:af35878673c83a44afbc12b13ac91a489da2819b5dc1e11768f3c2406f740fe9"}, +] +hpack = [ + {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"}, + {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"}, +] +hyper = [ + {file = "hyper-0.7.0-py2.py3-none-any.whl", hash = "sha256:069514f54231fb7b5df2fb910a114663a83306d5296f588fffcb0a9be19407fc"}, + {file = "hyper-0.7.0.tar.gz", hash = "sha256:12c82eacd122a659673484c1ea0d34576430afbe5aa6b8f63fe37fcb06a2458c"}, +] +hyperframe = [ + {file = "hyperframe-3.2.0-py2.py3-none-any.whl", hash = "sha256:4dcab11967482d400853b396d042038e4c492a15a5d2f57259e2b5f89a32f755"}, + {file = "hyperframe-3.2.0.tar.gz", hash = "sha256:05f0e063e117c16fcdd13c12c93a4424a2c40668abfac3bb419a10f57698204e"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +importlib-resources = [ + {file = "importlib_resources-5.7.1-py3-none-any.whl", hash = "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"}, + {file = "importlib_resources-5.7.1.tar.gz", hash = "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3"}, +] +ipykernel = [ + {file = "ipykernel-6.13.0-py3-none-any.whl", hash = "sha256:2b0987af43c0d4b62cecb13c592755f599f96f29aafe36c01731aaa96df30d39"}, + {file = "ipykernel-6.13.0.tar.gz", hash = "sha256:0e28273e290858393e86e152b104e5506a79c13d25b951ac6eca220051b4be60"}, +] +ipython = [ + {file = "ipython-8.3.0-py3-none-any.whl", hash = "sha256:341456643a764c28f670409bbd5d2518f9b82c013441084ff2c2fc999698f83b"}, + {file = "ipython-8.3.0.tar.gz", hash = "sha256:807ae3cf43b84693c9272f70368440a9a7eaa2e7e6882dad943c32fbf7e51402"}, +] +ipython-genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] +ipywidgets = [ + {file = "ipywidgets-7.7.0-py2.py3-none-any.whl", hash = 
"sha256:e58ff58bc94d481e91ecb6e13a5cb96a87b6b8ade135e055603d0ca24593df38"}, + {file = "ipywidgets-7.7.0.tar.gz", hash = "sha256:ab4a5596855a88b83761921c768707d65e5847068139bc1729ddfe834703542a"}, +] +iso8601 = [ + {file = "iso8601-1.0.2-py3-none-any.whl", hash = "sha256:d7bc01b1c2a43b259570bb307f057abc578786ea734ba2b87b836c5efc5bd443"}, + {file = "iso8601-1.0.2.tar.gz", hash = "sha256:27f503220e6845d9db954fb212b95b0362d8b7e6c1b2326a87061c3de93594b1"}, +] +jedi = [ + {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, + {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, +] +jinja2 = [ + {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, + {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, +] +jmespath = [ + {file = "jmespath-1.0.0-py3-none-any.whl", hash = "sha256:e8dcd576ed616f14ec02eed0005c85973b5890083313860136657e24784e4c04"}, + {file = "jmespath-1.0.0.tar.gz", hash = "sha256:a490e280edd1f57d6de88636992d05b71e97d69a26a19f058ecf7d304474bf5e"}, +] +jsonschema = [ + {file = "jsonschema-4.5.1-py3-none-any.whl", hash = "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f"}, + {file = "jsonschema-4.5.1.tar.gz", hash = "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc"}, +] +jupyter = [ + {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, + {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, + {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, +] +jupyter-client = [ + {file = "jupyter_client-7.3.1-py3-none-any.whl", hash = "sha256:404abe552540aff3527e66e16beb114b6b4ff58479d51a301f4eb9701e4f52ef"}, + {file = "jupyter_client-7.3.1.tar.gz", hash = "sha256:05d4ff6a0ade25138c6bb0fbeac7ddc26b5fe835e7dd816b64b4a45b931bdc0b"}, +] +jupyter-console = [ + {file = "jupyter_console-6.4.3-py3-none-any.whl", hash = "sha256:e630bcb682c0088dda45688ad7c2424d4a825c8acf494cb036ced03ed0424841"}, + {file = "jupyter_console-6.4.3.tar.gz", hash = "sha256:55f32626b0be647a85e3217ddcdb22db69efc79e8b403b9771eb9ecc696019b5"}, +] +jupyter-core = [ + {file = "jupyter_core-4.10.0-py3-none-any.whl", hash = "sha256:e7f5212177af7ab34179690140f188aa9bf3d322d8155ed972cbded19f55b6f3"}, + {file = "jupyter_core-4.10.0.tar.gz", hash = "sha256:a6de44b16b7b31d7271130c71a6792c4040f077011961138afed5e5e73181aec"}, +] +jupyterlab-pygments = [ + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, +] +jupyterlab-widgets = [ + {file = "jupyterlab_widgets-1.1.0-py3-none-any.whl", hash = "sha256:c2a9bd3789f120f64d73268c066ed3b000c56bc1dda217be5cdc43e7b4ebad3f"}, + {file = "jupyterlab_widgets-1.1.0.tar.gz", hash = "sha256:d5f41bc1713795385f718d44dcba47e1e1473c6289f28a95aa6b2c0782ee372a"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, + {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, +] +mistune = [ + {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, + {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, +] +nbclient = [ + {file = "nbclient-0.6.3-py3-none-any.whl", hash = "sha256:2747ac9b385720d8a6c34f2f71e72cbe64aec6cadaadcc064a4df0b0e99c5874"}, + {file = "nbclient-0.6.3.tar.gz", hash = "sha256:b80726fc1fb89a0e8f8be1e77e28d0026b1e8ed90bc143c8a0c7622e4f8cdd9e"}, +] +nbconvert = [ + {file = "nbconvert-6.5.0-py3-none-any.whl", hash = "sha256:c56dd0b8978a1811a5654f74c727ff16ca87dd5a43abd435a1c49b840fcd8360"}, + {file = "nbconvert-6.5.0.tar.gz", hash = "sha256:223e46e27abe8596b8aed54301fadbba433b7ffea8196a68fd7b1ff509eee99d"}, +] 
+nbformat = [ + {file = "nbformat-5.4.0-py3-none-any.whl", hash = "sha256:0d6072aaec95dddc39735c144ee8bbc6589c383fb462e4058abc855348152dad"}, + {file = "nbformat-5.4.0.tar.gz", hash = "sha256:44ba5ca6acb80c5d5a500f1e5b83ede8cbe364d5a495c4c8cf60aaf1ba656501"}, +] +nest-asyncio = [ + {file = "nest_asyncio-1.5.5-py3-none-any.whl", hash = "sha256:b98e3ec1b246135e4642eceffa5a6c23a3ab12c82ff816a92c612d68205813b2"}, + {file = "nest_asyncio-1.5.5.tar.gz", hash = "sha256:e442291cd942698be619823a17a86a5759eabe1f8613084790de189fe9e16d65"}, +] +notebook = [ + {file = "notebook-6.4.11-py3-none-any.whl", hash = "sha256:b4a6baf2eba21ce67a0ca11a793d1781b06b8078f34d06c710742e55f3eee505"}, + {file = "notebook-6.4.11.tar.gz", hash = "sha256:709b1856a564fe53054796c80e17a67262071c86bfbdfa6b96aaa346113c555a"}, +] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pandocfilters = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] +pdoc = [ + {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +ply = [ + {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, + {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, +] +prometheus-client = [ + {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, + {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = 
"sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, + {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, +] +psutil = [ + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, + {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, + {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, + {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, + {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, + {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, + {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, + {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, + {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, + {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, + {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, + {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, + {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, + {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, + {file = 
"psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, + {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, + {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, + {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, + {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, + {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, + {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, +] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pygments = [ + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, +] +pyjwt = [ + {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, + {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = 
"sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pyrsistent = [ + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = 
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +pytz = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] +pywin32 = [ + {file = "pywin32-304-cp310-cp310-win32.whl", hash = "sha256:3c7bacf5e24298c86314f03fa20e16558a4e4138fc34615d7de4070c23e65af3"}, + {file = "pywin32-304-cp310-cp310-win_amd64.whl", hash = "sha256:4f32145913a2447736dad62495199a8e280a77a0ca662daa2332acf849f0be48"}, + {file = "pywin32-304-cp310-cp310-win_arm64.whl", hash = "sha256:d3ee45adff48e0551d1aa60d2ec066fec006083b791f5c3527c40cd8aefac71f"}, + {file = "pywin32-304-cp311-cp311-win32.whl", hash = "sha256:30c53d6ce44c12a316a06c153ea74152d3b1342610f1b99d40ba2795e5af0269"}, + {file = "pywin32-304-cp311-cp311-win_amd64.whl", hash = "sha256:7ffa0c0fa4ae4077e8b8aa73800540ef8c24530057768c3ac57c609f99a14fd4"}, + {file = "pywin32-304-cp311-cp311-win_arm64.whl", hash = "sha256:cbbe34dad39bdbaa2889a424d28752f1b4971939b14b1bb48cbf0182a3bcfc43"}, + {file = "pywin32-304-cp36-cp36m-win32.whl", hash = "sha256:be253e7b14bc601718f014d2832e4c18a5b023cbe72db826da63df76b77507a1"}, + {file = "pywin32-304-cp36-cp36m-win_amd64.whl", hash = "sha256:de9827c23321dcf43d2f288f09f3b6d772fee11e809015bdae9e69fe13213988"}, + {file = "pywin32-304-cp37-cp37m-win32.whl", hash = "sha256:f64c0377cf01b61bd5e76c25e1480ca8ab3b73f0c4add50538d332afdf8f69c5"}, + {file = "pywin32-304-cp37-cp37m-win_amd64.whl", hash = "sha256:bb2ea2aa81e96eee6a6b79d87e1d1648d3f8b87f9a64499e0b92b30d141e76df"}, + {file = "pywin32-304-cp38-cp38-win32.whl", hash = "sha256:94037b5259701988954931333aafd39cf897e990852115656b014ce72e052e96"}, + {file = "pywin32-304-cp38-cp38-win_amd64.whl", hash = "sha256:ead865a2e179b30fb717831f73cf4373401fc62fbc3455a0889a7ddac848f83e"}, + {file = "pywin32-304-cp39-cp39-win32.whl", hash = "sha256:25746d841201fd9f96b648a248f731c1dec851c9a08b8e33da8b56148e4c65cc"}, + {file = "pywin32-304-cp39-cp39-win_amd64.whl", hash = "sha256:d24a3382f013b21aa24a5cfbfad5a2cd9926610c0affde3e8ab5b3d7dbcf4ac9"}, +] +pywinpty = [ + {file = "pywinpty-2.0.5-cp310-none-win_amd64.whl", hash = "sha256:f86c76e2881c37e69678cbbf178109f8da1fa8584db24d58e1b9369b0276cfcb"}, + {file = "pywinpty-2.0.5-cp37-none-win_amd64.whl", hash = "sha256:ff9b52f182650cfdf3db1b264a6fe0963eb9d996a7a1fa843ac406c1e32111f8"}, + {file = "pywinpty-2.0.5-cp38-none-win_amd64.whl", hash = "sha256:651ee1467bd7eb6f64d44dbc954b7ab7d15ab6d8adacc4e13299692c67c5d5d2"}, + {file = "pywinpty-2.0.5-cp39-none-win_amd64.whl", hash = "sha256:e59a508ae78374febada3e53b5bbc90b5ad07ae68cbfd72a2e965f9793ae04f3"}, + {file = "pywinpty-2.0.5.tar.gz", hash = "sha256:e125d3f1804d8804952b13e33604ad2ca8b9b2cac92b27b521c005d1604794f8"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +pyzmq = [ + {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:176be6c348dbec04e8e0d41e810743b7084b73e50954a6fedeeafc65d7fa9290"}, + {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ef2d1476cea927ba33a29f59aa128ce3b174e81083cbd091dd3149af741c85d"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2394bb857607494c3750b5040f852a1ad7831d7a7907b6106f0af2c70860cef"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fe8807d67456e7cf0e9a33b85e0d05bb9d2977dbdb23977e4cc2b843633618fd"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be3425dfdb9c46dc62d490fc1a6142a5f3dc6605ebb9048ae675056ef621413c"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda55ff0a7566405fb29ca38db1829fecb4c041b8dc3f91754f337bb7b27cbd8"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e4d70d34112997a32c8193fae2579aec854745f8730031e5d84cc579fd98ff"}, + {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f3daabbe42ca31712e29d906dfa4bf1890341d2fd5178de118bc9977a8d2b23b"}, + {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e7ae3e520bd182a0cbfff3cc69dda3a2c26f69847e81bd3f090ed04471fc1282"}, + {file = "pyzmq-23.0.0-cp310-cp310-win32.whl", hash = "sha256:1d480d48253f61ff90115b8069ed32f51a0907eb19101c4a5ae0b9a5973e40ad"}, + {file = "pyzmq-23.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7eca5902ff41575d9a26f91fc750018b7eb129600ea600fe69ce852fbdfab4e2"}, + {file = "pyzmq-23.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b2a4af5e6fa85ee1743c725b46579f8de0b97024eb5ae1a0b5c5711adc436665"}, + {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591b455546d34bb96aa453dd9666bddb8c81314e23dbf2606f9614acf7e73d9f"}, + {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdd008629293a0d4f00b516841ac0df89f17a64bc2d83bcfa48212d3f3b3ca1a"}, + {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:df0b05fa4321b090abe5601dea9b1c8933c06f496588ccb397a0b1f9dfe32ebe"}, + {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:12a53f5c13edf12547ce495afebdd5ab11c1b67ea078a941b21e13161783741a"}, + {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:cb45b7ea577283b547b907a3389d62ca2eaddaf725fbb79cd360802440fa9c91"}, + {file = "pyzmq-23.0.0-cp36-cp36m-win32.whl", hash = "sha256:0a787f7870cba38d655c68ea7ae14bb6c3e9e19bb618d0c2412513321eeaeb80"}, + {file = "pyzmq-23.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:536491ad640448f14d8aa2dc497c354a348f216eb23513bf5aa0ac40e2b02577"}, + {file = "pyzmq-23.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5eaf7e0841d3d8d1d92838c8b56f98cb9bf35b14bcbe4efa281e4812ef4be728"}, + {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21792f4d0fcc5040978ee211c033e915d8b6608ea8a5b33fe197a04f0d43e991"}, + {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a37f0ec88e220326803084208d80229218b309d728954ab747ab21cca33424aa"}, + {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9622d9560a6fd8d589816cdcec6946642cb4e070b3f68be1d3779b52cf240f73"}, + {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:434044eec7f9df08fc5ca5c9bdd1a4bb08663679d43ebe7b9849713956f4d85f"}, + {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12eac2294d48ee27d1eaef7e214acedb394e4c95e3a1f6e4467099b82d58ef73"}, + {file = "pyzmq-23.0.0-cp37-cp37m-win32.whl", hash = "sha256:07d2008e51718fba60641e5d1a0646b222b7929f16f6e7cf0834b8439f42c9e8"}, + {file = "pyzmq-23.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b8528aefceb787f41ad429f3210a3c6b52e99f85413416e3d0c9e6d035f8ac"}, + {file = "pyzmq-23.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3f3807e81bf51d4c63eb12a21920614e0e840645418e9f2e3b5ffdd5991b3415"}, + {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:011a45c846ec69a3671ed15893b74b6ad608800c89ac6d0f0411e2137c6b313d"}, + {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b97dc1273f16f85a38cff6668a07b636ef14e30591039efbfd21f5f91efae964"}, + {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8951830d6a00636b3af478091f9668ecc486f1dad01b975527957fd1d8c31bfd"}, + {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5619f6598d6fd30778053ae2daa48a7c54029816648b908270b751411fd52e74"}, + {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a89b9860d2171bcf674648dc8186db9cf3b773ad3c0610a2c7bf189cf3560b6"}, + {file = "pyzmq-23.0.0-cp38-cp38-win32.whl", hash = "sha256:0258563bf69f6ca305204354f171e0627a9bf8fe78c9d4f63a5e2447035cbb4b"}, + {file = "pyzmq-23.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:9feb7ccd426ff2158ce79f4c87a8a1600ed4f77e65e2fffda2b42638b2bc73e4"}, + {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:e9631c6a339843e4f95efb80ff9a1bfaaf3d611ba9677a7a5cc61ffb923b4e06"}, + {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34b143751e9b2b89cf9b656081f1b2842a563c4c9ffc8465531875daf546e772"}, + {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f227150148e7c3db7ecd8a58500439979f556e15455841a30b6d121755b14bc"}, + {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277b3ebc684b369a57a186a9acf629c1b01247eb04d1105536ef2dae5f61168a"}, + {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e2093a97bf3f6008a4be6b5bae8ae3fc409f18373593bef19dd7b381ab8030c"}, + {file = 
"pyzmq-23.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6c09e6e5c4baf0959287943dc8170624d739ae555d334e896a94d9de01c7bb21"}, + {file = "pyzmq-23.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c234aefeef034c5d6de452e2af5173a95ea06315b685db703091e6f937a6e60"}, + {file = "pyzmq-23.0.0-cp39-cp39-win32.whl", hash = "sha256:7b518ad9cdbaaeb1a9da3444797698871ae2eeae34ff9a656d5150d37e1e42a1"}, + {file = "pyzmq-23.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:011f26841dd56ed87e464c98023dbbd4c0b3ab8802a045de3ea83e0187eb8145"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a89285fedbeca483a855a77285453e21e4fc86ef0944bc018ef4b3033aa04ad2"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5a13171268f05d127e31b4c369b753733f67dbb0d765901ef625a115feb5c7de"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd3f563b98e2a8730c93bdc550f119ae766b2d3da1f0d6a3c7735b59adfa1642"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e730d490b1421e52b43b1b9f5e1f8c3973499206e188f29b582577531e11033b"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0de8a7e13ffacfe33c89acc0d7bfa2f5bde94e3f74b7f1e4d43c97ce17864d77"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a64b9cce166396df5f33894074d6515778d48c63aae5ee1abd86d8bbc5a711d8"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e464e7b1be2216eba54b47256c15bf307ae4a656aa0f73becea7b3e7283c5ac2"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3fa7126d532effee452c0ab395ab3cbef1c06fd6870ab7e681f812ba9e685cfa"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9273f6d1da1018822f41630fb0f3fe208e8e70e5d5e780795326900cfa22d8b6"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca7d77f24644298cbe53bc279eb7ca05f3b8637473d392f0c9f34b37f08b49a"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f40604437ec8010f77f7053fd135ccb202d6ca18329903831087cab8dbdab1"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4d861ae20040afc17adef33053c328667da78d4d3676b2936788fd031665e3a8"}, + {file = "pyzmq-23.0.0.tar.gz", hash = "sha256:a45f5c0477d12df05ef2e2922b49b7c0ae9d0f4ff9b6bb0d666558df0ef37122"}, +] +qtconsole = [ + {file = "qtconsole-5.3.0-py3-none-any.whl", hash = "sha256:75f2ded876444454edcb5a53262149e33b53db3a4a53116b7c3df52830905b0f"}, + {file = "qtconsole-5.3.0.tar.gz", hash = "sha256:8e3520fdc75e46abc4cc6cffeca16fa2652754109b8ae839fa28e27d1eba5625"}, +] +qtpy = [ + {file = "QtPy-2.1.0-py3-none-any.whl", hash = "sha256:aee0586081f943029312becece9f63977b0a9e3788f77a6ac8cc74802bb173d6"}, + {file = "QtPy-2.1.0.tar.gz", hash = "sha256:ca8cd4217175186344299ee4c0f7e7adcf362c70852ba35b255a534077025c06"}, +] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +s3transfer = [ + {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, + {file = "s3transfer-0.5.2.tar.gz", hash = 
"sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, +] +sammy = [ + {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, + {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, +] +send2trash = [ + {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, + {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +soupsieve = [ + {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, + {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, +] +stack-data = [ + {file = "stack_data-0.2.0-py3-none-any.whl", hash = "sha256:999762f9c3132308789affa03e9271bbbe947bf78311851f4d485d8402ed858e"}, + {file = "stack_data-0.2.0.tar.gz", hash = "sha256:45692d41bd633a9503a5195552df22b583caf16f0b27c4e58c98d88c8b648e12"}, +] +stripe = [ + {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, + {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, +] +swaggyp = [ + {file = "swaggyp-0.2.0-py3-none-any.whl", hash = "sha256:030dca0f0a24469abcf7e1f047eaa01c84c206a8d891cfb1a4a24f40e2a6a146"}, + {file = "swaggyp-0.2.0.tar.gz", hash = "sha256:b39ab7f8a49fdb17af7862c5ac81b872a5f7f95c3b0989a6a285fde3f4ee4885"}, +] +terminado = [ + {file = "terminado-0.15.0-py3-none-any.whl", hash = "sha256:0d5f126fbfdb5887b25ae7d9d07b0d716b1cc0ccaacc71c1f3c14d228e065197"}, + {file = "terminado-0.15.0.tar.gz", hash = "sha256:ab4eeedccfcc1e6134bfee86106af90852c69d602884ea3a1e8ca6d4486e9bfe"}, +] +tinycss2 = [ + {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, + {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, +] +tornado = [ + {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, + {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, + {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = 
"sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, + {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, + {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, + {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, + {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, + {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, + {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, + {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, + {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, + {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, + {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, + {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, 
+ {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, + {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, + {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, + {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, + {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, +] +traitlets = [ + {file = "traitlets-5.2.1.post0-py3-none-any.whl", hash = "sha256:f44b708d33d98b0addb40c29d148a761f44af740603a8fd0e2f8b5b27cf0f087"}, + {file = "traitlets-5.2.1.post0.tar.gz", hash = "sha256:70815ecb20ec619d1af28910ade523383be13754283aef90528eb3d47b77c5db"}, +] +urllib3 = [ + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, +] +valley = [ + {file = "valley-1.5.6-py3-none-any.whl", hash = "sha256:fa2e5fc51d59901e5eb178116a4fb15b712928b4c87809f59cdf02a934d63cf6"}, + {file = "valley-1.5.6.tar.gz", hash = "sha256:ec55f7df3512f0dfa23c9f253b414a02491dea41a62230ed459a43cf02fee9a3"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] +werkzeug = [ + {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, + {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, +] +widgetsnbextension = [ + {file = "widgetsnbextension-3.6.0-py2.py3-none-any.whl", hash = "sha256:4fd321cad39fdcf8a8e248a657202d42917ada8e8ed5dd3f60f073e0d54ceabd"}, + {file = "widgetsnbextension-3.6.0.tar.gz", hash = "sha256:e84a7a9fcb9baf3d57106e184a7389a8f8eb935bf741a5eb9d60aa18cc029a80"}, +] +zipp = [ + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, +] diff --git a/pyproject.toml b/pyproject.toml index dee44b6..5070eac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,9 +24,7 @@ sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" swaggyp = "^0.2.0" -formy = "1.3.1" -Jinja2 = "^3.1.2" -envs = "^1.4" +openapi-spec-validator = "^0.4.0" 
[tool.poetry.dev-dependencies] jupyter = "^1.0.0" From a972f473cfef0886690965612ee64f0da7d25525 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 129/214] Made yaml validation to properly separate openapi errors and python errors. Refactored swagger.py to return the swagger file directory --- pfunk/tests/test_aws.py | 46 +++++++++++++++---------- pfunk/utils/aws.py | 74 ++++++++++++++++++++++++++++++++++------- pfunk/utils/swagger.py | 36 ++++++++++++++------ 3 files changed, 117 insertions(+), 39 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index a6b1314..6ec5841 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,6 @@ +import os import unittest +import tempfile from unittest import mock from pfunk.utils.aws import ApiGateway @@ -9,34 +11,44 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpCls(cls) -> None: + def setUpClass(cls) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) - cls.api_yaml = cls.project.generate_swagger() + + swagger = cls.project.generate_swagger() + cls.swagger_dir = swagger['dir'] + cls.swagger_file = swagger['swagger_file'] + print(cls.swagger_dir) def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.api_yaml) + result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid def test_validate_wrong_yaml(self): result = self.aws_client.validate_yaml('wrong yaml...33::39') - self.assertIsNotNone(result) # if there are returned objs, there is an error + # if there are returned objs, there is an error + self.assertIsNotNone(result) @mock.patch('boto3.client') - def test_create_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() + def test_create_api_from_yaml(self, mocked): + result = self.aws_client.create_api_from_yaml( + yaml_file=self.swagger_dir) self.assertTrue(result['success']) @mock.patch('boto3.client') - def test_update_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() - self.assertTrue(result['success']) - - def test_create_api_from_wrong_yaml(self): - result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') - - def test_update_api_from_wrong_yaml(self): - result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') \ No newline at end of file + def test_create_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.create_api_from_yaml(tmp.name) + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + + # @mock.patch('boto3.client') + # def test_update_api_from_yaml(self): + # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) + # self.assertTrue(result['success']) + + # def test_update_api_from_wrong_yaml(self): + # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + # self.assertEqual(result, 'Bad Request. 
YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 4c61506..b1c26c0 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,36 +1,86 @@ import boto3 import swaggyp as sw +# from botocore.exceptions import BadReq +from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError class ApiGateway(object): + region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway') - pass + self.client = boto3.client('apigateway', region_name=self.region_name) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" try: spec_dict, spec_url = read_from_filename(yaml_file) validate_v2_spec(spec_dict) + except (OSError, AttributeError) as err: + return {'errors': str(err)} except OpenAPIValidationError as err: + return self._iterate_validator_errors(spec_dict) + return None + + def _iterate_validator_errors(self, spec_dict): + """ Iterates through list of errors that the `openapi_spec_validator` returned + + This method was implemented due to `openapi_spec_validator` design + that if an error happened while iterating through the YAML file + it returns a Python error. + + Args: + spec_dict (dict, required): + `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` + Returns: + list of errors + """ + try: errors = [{err.message: err.json_path} for err in openapi_v2_spec_validator.iter_errors(spec_dict)] return errors - return None + except (OSError, AttributeError) as err: + return str(err) - def create_api_from_yaml(self, yaml_file): - # response = client.import_rest_api( - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): + """ Creates an API for AWS API Gateway from a YAML swagger file + + Args: + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=file + ) + else: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file + ) + + if response: + return { + 'success': True, + response: response + } + # TODO: Specify boto exceptions + except Exception as err: + return err def update_api_from_yaml(self, yaml_file): # response = client.put_rest_api( diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 3f40ad1..3c0a01f 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -77,11 +77,20 @@ def __init__(self, collections, rules=[]): def _convert_url_to_swagger(self, replacement: str, to_replace: str) -> str: return re.sub('<\w+:\w+>', f'{{{replacement}}}', to_replace) - def write_to_yaml(self): + def write_to_yaml(self, dir=''): """ Using the class' variables, write it to a swagger (yaml) file It will create `swagger.yaml` file in current directory, if there is already one, it will print the yaml file instead. 
+ + Args: + dir (str, optional): + custom directory of the swagger file. If there are no provided, create one in current dir. + Returns: + dir (str, required): + directory of the created swagger file + swagger_file (str, required): + the contents of the swagger yaml file """ if not os.path.exists(f'pfunk.json'): raise Exception('Missing JSON Config file.') @@ -107,14 +116,16 @@ def write_to_yaml(self): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'swagger.yaml'): - with open(f'swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}/swagger.yaml'): + with open(f'{dir}/swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...') - print(t.to_yaml()) - return t.to_yaml() + print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + # print(t.to_yaml()) + return { + "dir": f'{dir}/swagger.yaml', + "swagger_file": t.to_yaml() + } def get_operations(self, col: Collection): """ Acquires all of the endpoint in the collections and make it @@ -234,10 +245,15 @@ def get_model_definitions(self, col: Collection): self.definitions.append(model) return self.definitions - def generate_swagger(self): - """ One-function-to-call needed function to generate a swagger documentation """ + def generate_swagger(self, dir=''): + """ One-function-to-call needed function to generate a swagger documentation + + Args: + dir (str, optional): + directory to create the yaml file + """ for i in self.collections: col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml() + return self.write_to_yaml(dir) \ No newline at end of file From 576833b4e1dc12dff450a07e158c68b7fe02a91f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:55:52 +0800 Subject: [PATCH 130/214] Refactored default dir of swagger file. Refactored unit tests for AWS utils --- pfunk/tests/test_aws.py | 1 - pfunk/utils/swagger.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 6ec5841..571f3c1 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -19,7 +19,6 @@ def setUpClass(cls) -> None: swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] - print(cls.swagger_dir) def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 3c0a01f..c041d06 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -116,14 +116,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}/swagger.yaml'): - with open(f'{dir}/swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}swagger.yaml'): + with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}/swagger.yaml', + "dir": f'{dir}swagger.yaml', "swagger_file": t.to_yaml() } From ed44f71aa542b8c3c9612077104517c499e73c65 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 131/214] Finished create/update api from yaml. Added writing to config file if API is created. 
Added reading from config file if API is to be updated --- pfunk/tests/test_aws.py | 23 +++++--- pfunk/utils/aws.py | 128 ++++++++++++++++++++++++++++++++-------- 2 files changed, 119 insertions(+), 32 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 571f3c1..2789dc2 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -11,7 +11,8 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpClass(cls) -> None: + @mock.patch('boto3.client') + def setUpClass(cls, mocked) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) @@ -43,11 +44,17 @@ def test_create_api_from_wrong_yaml(self, mocked): result = self.aws_client.create_api_from_yaml(tmp.name) self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') - # @mock.patch('boto3.client') - # def test_update_api_from_yaml(self): - # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) - # self.assertTrue(result['success']) + @mock.patch('boto3.client') + def test_update_api_from_yaml(self, mocked): + result = self.aws_client.update_api_from_yaml( + yaml_file=self.swagger_dir, mode='merge') + print(result) + self.assertTrue(result['success']) - # def test_update_api_from_wrong_yaml(self): - # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - # self.assertEqual(result, 'Bad Request. YAML is not valid.') + @mock.patch('boto3.client') + def test_update_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index b1c26c0..7427db3 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,4 +1,6 @@ +import datetime import boto3 +import json import swaggyp as sw # from botocore.exceptions import BadReq from envs import env @@ -6,12 +8,54 @@ from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError +AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') +AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') + + +def _json_dt_helper(o): + """ Helps serializing `datetime` objects to a readable string """ + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + + +def write_to_config(obj, config_file_dir='pfunk.json'): + """ Writes to pfunk config file + + Args: + obj (dict, required): + key, value pairs to write to json file + config_file_dir (str, optional): + directory of the config json file, default='pfunk.json' + Returns: + config_file (dict, required): + the current value of config file (pfunk.json) + """ + with open(config_file_dir, 'r+') as f: + data = json.load(f) + data.update(obj) + f.seek(0) + f.truncate() + json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) + return data + + +def read_from_config_file(config_file_dir='pfunk.json'): + """ Returns data from config file in dict form """ + with open(config_file_dir, 'r') as f: + data = json.load(f) + return data + class ApiGateway(object): region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway', region_name=self.region_name) + self.client = boto3.client( + 'apigateway', + aws_access_key_id=AWS_ACCESS_KEY, + 
aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name=AWS_DEFAULT_REGION) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" @@ -63,33 +107,69 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): try: if not type(yaml_file) == 'string': with open(yaml_file, 'r') as file: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=file - ) - else: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + yaml_file = file.read() + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file) + + # TODO: Fix -- if using mocked obj, don't write anything + if response: + write_to_config({'api': response}) + return { + 'success': True, + 'response': response + } + # TODO: Specify boto exceptions + except Exception as err: + return { + 'error': str(err) + } + + def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): + """ Updates rest API using yaml file + + Args: + rest_api_id (string, required): + ID of the API for updating, if not provided, use API ID from `pfunk.json` + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + mode (string, required): + Mode of update, choice=['merge', 'overwrite'] + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + yaml_file = file.read() + # Acquire REST API ID from config file if not provided + if not rest_api_id: + data = read_from_config_file() + if data.get('api'): + rest_api_id = (data.get('api') + .get('id')) + + response = self.client.put_rest_api( + restApiId=rest_api_id, + mode=mode, + failOnWarnings=fail_on_warnings, + body=yaml_file + ) if response: return { 'success': True, - response: response + 'response': response } # TODO: Specify boto exceptions except Exception as err: - return err - - def update_api_from_yaml(self, yaml_file): - # response = client.put_rest_api( - # restApiId='string', - # mode='merge'|'overwrite', - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + return { + 'error': str(err) + } From 3c8fef5528c558ca0bf60b70d11619991f2e63db Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 13 Apr 2022 13:44:30 +0800 Subject: [PATCH 132/214] Reworked what exceptions does aws util react to. Added doc for aws util tests --- pfunk/tests/test_aws.py | 10 +++++++++- pfunk/utils/aws.py | 15 ++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 2789dc2..d28c852 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -9,6 +9,15 @@ class ApiGatewayTests(unittest.TestCase): + """ Unit tests for creation of API from Swagger file + + Note that the unittests uses mocked boto3 normally. If + you want to test against a real endpoint, remove the + patch decorator at `setUpClass` and the `mocked` + param. Also make sure you have the required + env vars for AWS credentials and you have + the json config in the current env. 
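+
+    A minimal direct-usage sketch for such a run, assuming AWS_ACCESS_KEY,
+    AWS_SECRET_ACCESS_KEY and AWS_DEFAULT_REGION are set in the environment
+    and a `pfunk.json` config file exists in the working directory:
+
+        from pfunk.utils.aws import ApiGateway
+
+        gateway = ApiGateway()
+        errors = gateway.validate_yaml('swagger.yaml')  # None means the spec is valid
+        if errors is None:
+            result = gateway.create_api_from_yaml('swagger.yaml')
+            # on success, the import response is also written to pfunk.json under 'api'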
+ """ @classmethod @mock.patch('boto3.client') @@ -48,7 +57,6 @@ def test_create_api_from_wrong_yaml(self, mocked): def test_update_api_from_yaml(self, mocked): result = self.aws_client.update_api_from_yaml( yaml_file=self.swagger_dir, mode='merge') - print(result) self.assertTrue(result['success']) @mock.patch('boto3.client') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7427db3..13164f8 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -2,7 +2,7 @@ import boto3 import json import swaggyp as sw -# from botocore.exceptions import BadReq +from botocore.exceptions import ClientError, NoCredentialsError from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename @@ -20,7 +20,7 @@ def _json_dt_helper(o): def write_to_config(obj, config_file_dir='pfunk.json'): - """ Writes to pfunk config file + """ Appends object to pfunk config file Args: obj (dict, required): @@ -111,16 +111,14 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - - # TODO: Fix -- if using mocked obj, don't write anything + if response: write_to_config({'api': response}) return { 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } @@ -154,7 +152,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin data = read_from_config_file() if data.get('api'): rest_api_id = (data.get('api') - .get('id')) + .get('id')) response = self.client.put_rest_api( restApiId=rest_api_id, @@ -168,8 +166,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } From 59030c4279862374d01a117a37246d3cbee36c69 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 27 Apr 2022 14:41:51 +0800 Subject: [PATCH 133/214] Did cleaning up of swaggerdoc docstrings --- pfunk/utils/swagger.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index c041d06..3c760c1 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -42,6 +42,7 @@ def __init__(self, collections, rules=[]): """ Generates swagger doc. 
Details are going to be acquired from the collections The acquisition of the information needed for docs are as follows: + ``` Response: Description (str): View's `get_query` docstrings Status Code (int): @@ -56,9 +57,16 @@ def __init__(self, collections, rules=[]): Model: Name (str): The class name of the `collection` Properties (str): The fields of the collection and their type - + ``` + + Args: + collections ([`pfunk.collection.Collection`]): + array of collection of the project to generate models from + rules ([`werkzeug.routing.Rule`]): + array of additional URLs that the given collection doesn't have Returns: - Generated YAML file + swagger.yaml (yaml, required): + Generated YAML file """ self.collections = collections self.rules = rules From 712e30ca5a62a2d4a15d2b5654b0f8ef191c690f Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 4 May 2022 14:52:41 +0800 Subject: [PATCH 134/214] added generate-swagger as a cli command, creates a swagger file from the provided json config file of a project --- pfunk/cli.py | 41 +++++++++++++++++++++++++++++++++-------- pfunk/project.py | 18 +++++++++++++++--- pfunk/utils/swagger.py | 21 +++++++++++++++------ 3 files changed, 63 insertions(+), 17 deletions(-) diff --git a/pfunk/cli.py b/pfunk/cli.py index c23d6f1..830f3c7 100644 --- a/pfunk/cli.py +++ b/pfunk/cli.py @@ -39,9 +39,7 @@ def load_config_file(filename): @click.option('--description', prompt=True, help='Project Description') @click.option('--api_type', type=click.Choice(['web', 'rest', 'none']), prompt=True, help='API Type (web, rest, none)') @click.argument('name') -def init(name: str, api_type: str, description: str, host: str, fauna_key: str, bucket: str, email: str, - stage_name: str, generate_local_key: bool): - +def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str, generate_local_key: bool): """ Creates a PFunk project Args: @@ -76,7 +74,8 @@ def init(name: str, api_type: str, description: str, host: str, fauna_key: str, }, f, indent=4, sort_keys=True) open(f'{name}/__init__.py', 'x').close() with open(f'{name}/wsgi.py', 'x') as f: - f.write(wsgi_template.render(PFUNK_PROJECT=f'{name}.project.project')) + f.write(wsgi_template.render( + PFUNK_PROJECT=f'{name}.project.project')) with open(f'{name}/project.py', 'x') as f: f.write(project_template.render()) with open(f'{name}/collections.py', 'x') as f: @@ -89,9 +88,11 @@ def init(name: str, api_type: str, description: str, host: str, fauna_key: str, q.create_database({'name': db_name}) ) key = client.query( - q.create_key({'database': q.database(db_name), 'role': 'admin'}) + q.create_key( + {'database': q.database(db_name), 'role': 'admin'}) ) - click.secho(f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') + click.secho( + f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') else: click.echo('There is already a project file in this directory.') @@ -147,7 +148,8 @@ def local(hostname: str, port: int, wsgi: str, config_file: str, use_debugger: b sys.path.insert(0, os.getcwd()) wsgi_path = wsgi or f'{config.get("name")}.wsgi.app' app = import_util(wsgi_path) - run_simple(hostname, port, app, use_debugger=use_debugger, use_reloader=use_reloader) + run_simple(hostname, port, app, use_debugger=use_debugger, + use_reloader=use_reloader) @pfunk.command() @@ -257,7 +259,8 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na project = import_util(project_path) perm_list = [] 
for i in project.collections: - perm_list.append(PermissionGroup(collection=i, permissions=['create', 'write', 'read', 'delete'])) + perm_list.append(PermissionGroup(collection=i, permissions=[ + 'create', 'write', 'read', 'delete'])) user.add_permissions(group, perm_list) @@ -283,5 +286,27 @@ def deploy(stage_name: str, config_path: str): d.deploy(stage_name) +@pfunk.command() +@click.option('--config_path', help='Configuration file path', default='pfunk.json') +@click.option('--yaml_path', help='Dir to create yaml swagger file to', default='') +def generate_swagger(config_path: str, yaml_path: str): + """ Generates the swagger file of the project from a config json file + + Args: + config_path (str, optional): + dir of the json config file to use + yaml_path (str, optional): + dir to put the generated swagger file + + Returns: + + """ + config = load_config_file(config_path) + sys.path.insert(0, os.getcwd()) + project_path = f'{config.get("name")}.project.project' + project = import_util(project_path) + project.generate_swagger(yaml_dir=yaml_path, config_file=config_path) + + if __name__ == '__main__': pfunk() diff --git a/pfunk/project.py b/pfunk/project.py index 0eca94f..313a464 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -299,9 +299,21 @@ def wsgi_app(self, environ, start_response): start_response(status_str, response.wsgi_headers) return [str.encode(response.body)] - def generate_swagger(self): + def generate_swagger(self, yaml_dir='', config_file='pfunk.json'): + """ Generates a swagger file that houses all endpoints + + Args: + yaml_dir (str, optional): + which directory to create the swagger yaml file + config_file (str, optional): + which directory to look for the config file + + Returns: + swagger file + """ swag = SwaggerDoc( collections=self.collections, - rules=[GraphQLView.url()]) - swag_file = swag.generate_swagger() + rules=[GraphQLView.url()], + config_file=config_file) + swag_file = swag.generate_swagger(dir=yaml_dir) return swag_file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 3c760c1..344f60b 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -38,7 +38,7 @@ class SwaggerDoc(object): - def __init__(self, collections, rules=[]): + def __init__(self, collections, rules=[], config_file='pfunk.json'): """ Generates swagger doc. 
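             (A hedged end-to-end sketch: assuming the console script is
             installed as `pfunk`, running `pfunk generate-swagger
             --config_path pfunk.json --yaml_path docs/` loads the project
             named in the config file and calls `Project.generate_swagger()`,
             which instantiates this class from the project's collections.)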
Details are going to be acquired from the collections The acquisition of the information needed for docs are as follows: @@ -64,6 +64,9 @@ def __init__(self, collections, rules=[]): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given collection doesn't have + config_file (str, optional): + directory of the config_file + Returns: swagger.yaml (yaml, required): Generated YAML file @@ -73,6 +76,7 @@ def __init__(self, collections, rules=[]): self.paths = [] self.definitions = [] self.responses = [] + self.config_file = config_file self._response_classes = [ 'response_class', 'not_found_class', @@ -100,10 +104,10 @@ def write_to_yaml(self, dir=''): swagger_file (str, required): the contents of the swagger yaml file """ - if not os.path.exists(f'pfunk.json'): - raise Exception('Missing JSON Config file.') + if not os.path.exists(self.config_file): + raise Exception('Missing JSON Config file.') else: - with open(f'pfunk.json', 'r') as f: + with open(self.config_file, 'r') as f: data = json.loads(f.read()) proj_title = data.get('name') proj_desc = data.get('description', 'A Pfunk project') @@ -112,6 +116,10 @@ def write_to_yaml(self, dir=''): basePath = data.get('basePath', '/') schemes = ['https'] + if dir: + if not dir.endswith('/'): + dir = dir + "/" + info = sw.Info( title=proj_title, description=proj_desc, @@ -128,7 +136,8 @@ def write_to_yaml(self, dir=''): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + print( + 'There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { "dir": f'{dir}swagger.yaml', @@ -264,4 +273,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) \ No newline at end of file + return self.write_to_yaml(dir) From 86bac0356df86e7ff5d3723b15b6202b07a6bdbf Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 135/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 36 ++++++++++++------------ pfunk/utils/swagger.py | 56 ++++++++++++++++++++++++++++++++----- 2 files changed, 67 insertions(+), 25 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 2665808..c47a132 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # 
self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 344f60b..abbe8fc 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -160,6 +160,7 @@ def get_operations(self, col: Collection): An array of `Path` that can be consumed using `swaggyp.SwaggerTemplate` to show available paths + ``` """ for view in col.collection_views: route = view.url(col) @@ -188,6 +189,7 @@ def get_operations(self, col: Collection): # Skip HEAD operations continue + # Acquire path parameters of URL if args is None or len(args) == 0: # if `defaults` weren't used in URL building, use the argument defined in the URL string for converter, arguments, variable in parse_rule(rule): @@ -196,10 +198,10 @@ def get_operations(self, col: Collection): args = variable arg_type = converter - # Replace werkzeug params () to swagger-style params ({id}) - swagger_rule = self._convert_url_to_swagger(args, rule) + params = [] + # Construct path parameters for swagger generation if arg_type: - params = sw.Parameter( + path_params = sw.Parameter( name=args, _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type), _in='path', @@ -207,18 +209,55 @@ def get_operations(self, col: Collection): required=True, allowEmptyValue=False ) + params.append(path_params) + + # Acquire payload of the view from the View's docstring + # where to cut the docstrings to use the definition for the payload of the view + oas_str_split = '[OAS]\n' + view_docs = view.__doc__ + view_payload = None + if (view_docs and len(view_docs.split(oas_str_split)) > 1): + view_payload = view_docs.split(oas_str_split)[1] + + # Construct payload for swagger generation + if view_payload: + for field in json.loads(view_payload).get('data'): + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) + params.append(param) + + docs_description = view_docs if not len(view_docs.split( + oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] + consumes = ['application/json', + 'application/x-www-form-urlencoded'] + produces = ['application/json', + 'application/x-www-form-urlencoded'] + if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, + description=docs_description, responses=responses, - parameters=[params]) + consumes=consumes, + produces=produces, + parameters=params) else: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=responses) + description=docs_description, + responses=responses, + consumes=consumes, + produces=produces) + + # Replace werkzeug params () to swagger-style params ({id}) + swagger_rule = self._convert_url_to_swagger(args, rule) p = sw.Path(endpoint=swagger_rule, 
operations=[op]) self.paths.append(p) return self.paths @@ -240,6 +279,9 @@ def get_model_definitions(self, col: Collection): An array of `Definition` that can be consumed using `swaggyp.SwaggerTemplate` to show available models + + Payload: + """ # Define model definitions by iterating through collection's fields for its properties From c0bccbe91f731c6dac1b848ef2939472c5cf2232 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 136/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 20 ++++++++------------ pfunk/web/views/json.py | 32 +++++++++++++++++++++++++++++--- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index abbe8fc..049ebe4 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -211,17 +212,13 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's docstring - # where to cut the docstrings to use the definition for the payload of the view - oas_str_split = '[OAS]\n' - view_docs = view.__doc__ - view_payload = None - if (view_docs and len(view_docs.split(oas_str_split)) > 1): - view_payload = view_docs.split(oas_str_split)[1] + # Acquire payload of the view from the View's `_payload_docs` + view_payload = view()._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: - for field in json.loads(view_payload).get('data'): + for field in view_payload.get('data'): param = sw.Parameter( name=field.get('name'), _type=field.get('type'), @@ -232,17 +229,16 @@ def get_operations(self, col: Collection): ) params.append(param) - docs_description = view_docs if not len(view_docs.split( - oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = ['application/json', 'application/x-www-form-urlencoded'] + view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces, @@ -251,7 +247,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 93b957a..fc18535 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -26,6 +26,29 @@ def get_response(self): headers=self.get_headers() ) + def _payload_docs(self): + """ Used in defining payload parameters for the view. + + Should return a dict that has the fields of a swagger parameter e.g. 
+ {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + """ + return {} + class CreateView(UpdateMixin, JSONActionMixin, JSONView): """ Define a `Create` view that allows `creation` of an entity in the collection """ @@ -35,7 +58,8 @@ class CreateView(UpdateMixin, JSONActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ - obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) + obj = self.collection.create( + **self.get_query_kwargs(), _token=self.request.token) return obj def get_m2m_kwargs(self, obj): @@ -51,7 +75,8 @@ def get_m2m_kwargs(self, obj): """ data = self.request.get_json() - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + fields = self.collection.get_foreign_fields_by_type( + 'pfunk.fields.ManyToManyField') for k, v in fields.items(): current_value = data.get(k) col = v.get('foreign_class')() @@ -71,7 +96,8 @@ class UpdateView(UpdateMixin, JSONIDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ - obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) + obj = self.collection.get(self.request.kwargs.get( + 'id'), _token=self.request.token) obj._data.update(self.get_query_kwargs()) obj.save() return obj From 6e62a74b646c67e44000bc52e38ceaaef6372e13 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 137/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ pfunk/utils/swagger.py | 29 ++++++++++++++++++++--------- pfunk/web/views/json.py | 14 +++++++------- 3 files changed, 45 insertions(+), 34 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index c47a132..2665808 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) - - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) - - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) + + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) + + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' 
in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 049ebe4..592b6c0 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,17 +216,28 @@ def get_operations(self, col: Collection): view_payload = view()._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + if field.get('schema'): + schema = sw.SwagSchema( + ref=field.get('schema') + ) + param = sw.Parameter( + name=field.get('name'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + schema=schema + ) + else: + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) consumes = ['application/json', diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index fc18535..94071c4 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,7 +27,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in defining payload parameters for the view. + """ Used in custom defining payload parameters for the view. Should return a dict that has the fields of a swagger parameter e.g. {"data": [ @@ -35,15 +35,15 @@ def _payload_docs(self): "name":"name", "in":"formData", "description":"name of the pet", - "required": true, + "required": True, "type": "string" }, { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" } ]} """ From ce2b467eaeda8d1eb1abc428ace752ddd0c20282 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:08 +0800 Subject: [PATCH 138/214] Updated the docstrings of _payload_docs --- pfunk/utils/swagger.py | 4 +--- pfunk/web/views/json.py | 51 ++++++++++++++++++++++++++++++++++------- 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 592b6c0..a2fbc2e 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -219,9 +219,7 @@ def get_operations(self, col: Collection): if view_payload: for field in view_payload.get('data'): if field.get('schema'): - schema = sw.SwagSchema( - ref=field.get('schema') - ) + schema = sw.SwagSchema(ref=field.get('schema')) param = sw.Parameter( name=field.get('name'), _in=field.get('in'), diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 94071c4..bbc40ce 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -29,15 +29,31 @@ def get_response(self): def _payload_docs(self): """ Used in custom defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter e.g. + Should return a dict that has the fields of a swagger parameter. + If there is an error in the swagger, it will not be raised. + Usage of `https://editor.swagger.io` to validate is recommended + e.g. 
+ ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": True, - "type": "string" - }, { "name": "body", "in": "body", @@ -46,6 +62,7 @@ def _payload_docs(self): "schema": "#/definitions/Person" } ]} + ``` """ return {} @@ -87,6 +104,24 @@ def get_m2m_kwargs(self, obj): ) ) + def _payload_docs(self): + return {"data": [ + { + "name": "name", + "in": "formData", + "description": "name of the pet", + "required": True, + "type": "string" + }, + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} + class UpdateView(UpdateMixin, JSONIDMixin, JSONView): """ Define a view to allow `Update` operations """ From e1312f4ee108a16bde4c2c66234c558b64d2fdae Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:41 +0800 Subject: [PATCH 139/214] Updated the docstrings of _payload_docs --- pfunk/web/views/json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index bbc40ce..5399bd1 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,7 +27,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view. + """ Used in custom defining payload parameters for the view in Swagger generation. Should return a dict that has the fields of a swagger parameter. If there is an error in the swagger, it will not be raised. 
From 228a039c0c042f18ae2c40d2dfcf547f6dc42283 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 140/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - pfunk/web/views/json.py | 27 +++++++++++---------------- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 22 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a2fbc2e..eb2c57b 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 5399bd1..5b5079f 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -105,22 +105,17 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - return {"data": [ - { - "name": "name", - "in": "formData", - "description": "name of the pet", - "required": True, - "type": "string" - }, - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" - } - ]} + # TODO: Get view's collection class name by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/Collection" + } + ]} class UpdateView(UpdateMixin, JSONIDMixin, JSONView): diff --git a/poetry.lock b/poetry.lock index 66ee101..a3f2ed3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1318,7 +1318,7 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "swaggyp" -version = "0.2.0" +version = "0.3.0" description = "Python library for generating Swagger templates based on valley" category = "main" optional = false @@ -1469,7 +1469,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" +content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" [metadata.files] appnope = [ @@ -2184,8 +2184,8 @@ stripe = [ {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, ] swaggyp = [ - {file = "swaggyp-0.2.0-py3-none-any.whl", hash = "sha256:030dca0f0a24469abcf7e1f047eaa01c84c206a8d891cfb1a4a24f40e2a6a146"}, - {file = "swaggyp-0.2.0.tar.gz", hash = "sha256:b39ab7f8a49fdb17af7862c5ac81b872a5f7f95c3b0989a6a285fde3f4ee4885"}, + {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, + {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, ] terminado = [ {file = "terminado-0.15.0-py3-none-any.whl", hash = "sha256:0d5f126fbfdb5887b25ae7d9d07b0d716b1cc0ccaacc71c1f3c14d228e065197"}, diff --git a/pyproject.toml b/pyproject.toml index 5070eac..6cea052 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,8 +23,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" +swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 664c7b145d6644a330b9a76f0f4db2f12f986fc9 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 23 Jun 2022 15:13:06 +0800 Subject: [PATCH 
141/214] Fixed calling of view's _payload_docs to pass the correct argument. Made Create and Update views have default reference to model --- pfunk/utils/swagger.py | 2 +- pfunk/web/views/json.py | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index eb2c57b..a765760 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -212,7 +212,7 @@ def get_operations(self, col: Collection): params.append(path_params) # Acquire payload of the view from the View's `_payload_docs` - view_payload = view()._payload_docs() + view_payload = view(col)._payload_docs() # Construct payload for swagger generation if view_payload: diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 5b5079f..649f08c 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -105,7 +105,7 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # TODO: Get view's collection class name by default + # Reference the collection by default if self.collection: return {"data": [ { @@ -113,7 +113,7 @@ def _payload_docs(self): "in": "body", "description": "Collection object to add", "required": True, - "schema": f"#/definitions/Collection" + "schema": f"#/definitions/{self.collection.__class__.__name__}" } ]} @@ -132,6 +132,19 @@ def get_query(self): obj.save() return obj + def _payload_docs(self): + # Reference the collection by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/{self.collection.__class__.__name__}" + } + ]} + class DetailView(ObjectMixin, JSONIDMixin, JSONView): """ Define a view to allow single entity operations """ From 8cc26a6d86b02e721a999c349812729018557c50 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:08:35 +0800 Subject: [PATCH 142/214] Added aws.py as a skeleton for upcoming AWS features --- pfunk/utils/aws.py | 192 +++++++-------------------------------------- 1 file changed, 28 insertions(+), 164 deletions(-) diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 13164f8..7413120 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,172 +1,36 @@ -import datetime import boto3 -import json import swaggyp as sw -from botocore.exceptions import ClientError, NoCredentialsError -from envs import env -from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator -from openapi_spec_validator.readers import read_from_filename -from openapi_spec_validator.exceptions import OpenAPIValidationError - -AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') -AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') -AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') - - -def _json_dt_helper(o): - """ Helps serializing `datetime` objects to a readable string """ - if isinstance(o, (datetime.date, datetime.datetime)): - return o.isoformat() - - -def write_to_config(obj, config_file_dir='pfunk.json'): - """ Appends object to pfunk config file - - Args: - obj (dict, required): - key, value pairs to write to json file - config_file_dir (str, optional): - directory of the config json file, default='pfunk.json' - Returns: - config_file (dict, required): - the current value of config file (pfunk.json) - """ - with open(config_file_dir, 'r+') as f: - data = json.load(f) - data.update(obj) - f.seek(0) - f.truncate() - json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) - return data - - -def 
read_from_config_file(config_file_dir='pfunk.json'): - """ Returns data from config file in dict form """ - with open(config_file_dir, 'r') as f: - data = json.load(f) - return data - class ApiGateway(object): - region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client( - 'apigateway', - aws_access_key_id=AWS_ACCESS_KEY, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY, - region_name=AWS_DEFAULT_REGION) - + self.client = boto3.client('apigateway') + pass + + def create_api_from_yaml(self, yaml_file): + # response = client.import_rest_api( + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def update_api_from_yaml(self, yaml_file): + # response = client.put_rest_api( + # restApiId='string', + # mode='merge'|'overwrite', + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using OpenAPI Spec v2""" - try: - spec_dict, spec_url = read_from_filename(yaml_file) - validate_v2_spec(spec_dict) - except (OSError, AttributeError) as err: - return {'errors': str(err)} - except OpenAPIValidationError as err: - return self._iterate_validator_errors(spec_dict) - return None - - def _iterate_validator_errors(self, spec_dict): - """ Iterates through list of errors that the `openapi_spec_validator` returned - - This method was implemented due to `openapi_spec_validator` design - that if an error happened while iterating through the YAML file - it returns a Python error. - - Args: - spec_dict (dict, required): - `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` - Returns: - list of errors - """ - try: - errors = [{err.message: err.json_path} - for err in openapi_v2_spec_validator.iter_errors(spec_dict)] - return errors - except (OSError, AttributeError) as err: - return str(err) - - def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): - """ Creates an API for AWS API Gateway from a YAML swagger file - - Args: - yaml_file (yaml file, required): - The OpenAPI swagger file to create API from - fail_on_warnings (bool, optional): - Specifies if the method will error on warnings. Default: `True` - """ - _yaml_valid = self.validate_yaml(yaml_file) - if _yaml_valid: - return { - "error": 'Bad Request. YAML is not valid.', - "yaml_err": _yaml_valid - } - - try: - if not type(yaml_file) == 'string': - with open(yaml_file, 'r') as file: - yaml_file = file.read() - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file) - - if response: - write_to_config({'api': response}) - return { - 'success': True, - 'response': response - } - except (ClientError, NoCredentialsError) as err: - return { - 'error': str(err) - } - - def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): - """ Updates rest API using yaml file - - Args: - rest_api_id (string, required): - ID of the API for updating, if not provided, use API ID from `pfunk.json` - yaml_file (yaml file, required): - The OpenAPI swagger file to create API from - mode (string, required): - Mode of update, choice=['merge', 'overwrite'] - fail_on_warnings (bool, optional): - Specifies if the method will error on warnings. Default: `True` - """ - _yaml_valid = self.validate_yaml(yaml_file) - if _yaml_valid: - return { - "error": 'Bad Request. 
YAML is not valid.', - "yaml_err": _yaml_valid - } - - try: - if not type(yaml_file) == 'string': - with open(yaml_file, 'r') as file: - yaml_file = file.read() - # Acquire REST API ID from config file if not provided - if not rest_api_id: - data = read_from_config_file() - if data.get('api'): - rest_api_id = (data.get('api') - .get('id')) - - response = self.client.put_rest_api( - restApiId=rest_api_id, - mode=mode, - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + """ Validate YAML file if it is valid for using """ + pass - if response: - return { - 'success': True, - 'response': response - } - except (ClientError, NoCredentialsError) as err: - return { - 'error': str(err) - } + def \ No newline at end of file From ee82735890df4e0a0d6bfc082863be60e7fb5b59 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:11:41 +0800 Subject: [PATCH 143/214] Added skeleton tests for aws features --- pfunk/tests/test_aws.py | 64 ++++++----------------------------------- 1 file changed, 8 insertions(+), 56 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index d28c852..c3cdc45 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,68 +1,20 @@ -import os import unittest -import tempfile -from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group, Person, Sport +from pfunk.tests import User, Group from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - """ Unit tests for creation of API from Swagger file - - Note that the unittests uses mocked boto3 normally. If - you want to test against a real endpoint, remove the - patch decorator at `setUpClass` and the `mocked` - param. Also make sure you have the required - env vars for AWS credentials and you have - the json config in the current env. - """ - @classmethod - @mock.patch('boto3.client') - def setUpClass(cls, mocked) -> None: - cls.project = Project() - cls.aws_client = ApiGateway() - cls.project.add_resources([Person, Sport, Group, User]) - - swagger = cls.project.generate_swagger() - cls.swagger_dir = swagger['dir'] - cls.swagger_file = swagger['swagger_file'] + def setUp(self) -> None: + self.project = Project() def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.swagger_dir) - self.assertIsNone(result) # if there are no errors, then spec is valid - - def test_validate_wrong_yaml(self): - result = self.aws_client.validate_yaml('wrong yaml...33::39') - # if there are returned objs, there is an error - self.assertIsNotNone(result) - - @mock.patch('boto3.client') - def test_create_api_from_yaml(self, mocked): - result = self.aws_client.create_api_from_yaml( - yaml_file=self.swagger_dir) - self.assertTrue(result['success']) - - @mock.patch('boto3.client') - def test_create_api_from_wrong_yaml(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: - tmp.seek(0) - tmp.write('test wrong yaml') - result = self.aws_client.create_api_from_yaml(tmp.name) - self.assertEqual(result['error'], 'Bad Request. 
YAML is not valid.') + pass - @mock.patch('boto3.client') - def test_update_api_from_yaml(self, mocked): - result = self.aws_client.update_api_from_yaml( - yaml_file=self.swagger_dir, mode='merge') - self.assertTrue(result['success']) + def test_create_api_from_yaml(self): + pass - @mock.patch('boto3.client') - def test_update_api_from_wrong_yaml(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: - tmp.seek(0) - tmp.write('test wrong yaml') - result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') - self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + def test_update_api_from_yaml(self): + pass From d2a371b2c4ef7694ab99f45a0eb72abc9df3e5c8 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 7 Apr 2022 15:29:22 +0800 Subject: [PATCH 144/214] Added yaml validator and have it return specific errors --- pfunk/tests/test_aws.py | 34 +++++++++++++++++++++++++++------ pfunk/utils/aws.py | 21 +++++++++++++++------ pfunk/utils/swagger.py | 1 + poetry.lock | 42 ++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 +- 5 files changed, 86 insertions(+), 14 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index c3cdc45..a6b1314 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,20 +1,42 @@ import unittest +from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group +from pfunk.tests import User, Group, Person, Sport from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - def setUp(self) -> None: - self.project = Project() + @classmethod + def setUpCls(cls) -> None: + cls.project = Project() + cls.aws_client = ApiGateway() + cls.project.add_resources([Person, Sport, Group, User]) + cls.api_yaml = cls.project.generate_swagger() def test_validate_yaml(self): - pass + result = self.aws_client.validate_yaml(self.api_yaml) + self.assertIsNone(result) # if there are no errors, then spec is valid + def test_validate_wrong_yaml(self): + result = self.aws_client.validate_yaml('wrong yaml...33::39') + self.assertIsNotNone(result) # if there are returned objs, there is an error + + @mock.patch('boto3.client') def test_create_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + @mock.patch('boto3.client') def test_update_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + + def test_create_api_from_wrong_yaml(self): + result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. YAML is not valid.') + + def test_update_api_from_wrong_yaml(self): + result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. 
YAML is not valid.') \ No newline at end of file diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7413120..4c61506 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,5 +1,9 @@ import boto3 import swaggyp as sw +from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator +from openapi_spec_validator.readers import read_from_filename +from openapi_spec_validator.exceptions import OpenAPIValidationError + class ApiGateway(object): @@ -7,6 +11,17 @@ def __init__(self): self.client = boto3.client('apigateway') pass + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using OpenAPI Spec v2""" + try: + spec_dict, spec_url = read_from_filename(yaml_file) + validate_v2_spec(spec_dict) + except OpenAPIValidationError as err: + errors = [{err.message: err.json_path} + for err in openapi_v2_spec_validator.iter_errors(spec_dict)] + return errors + return None + def create_api_from_yaml(self, yaml_file): # response = client.import_rest_api( # failOnWarnings=True|False, @@ -28,9 +43,3 @@ def update_api_from_yaml(self, yaml_file): # body=b'bytes'|file # ) pass - - def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using """ - pass - - def \ No newline at end of file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a765760..5367db1 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -135,6 +135,7 @@ def write_to_yaml(self, dir=''): if not os.path.exists(f'{dir}swagger.yaml'): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) + return t.to_yaml() else: print( 'There is an existing swagger file. Kindly move/delete it to generate a new one.') diff --git a/poetry.lock b/poetry.lock index a3f2ed3..26a6f60 100644 --- a/poetry.lock +++ b/poetry.lock @@ -921,6 +921,38 @@ PyYAML = ">=5.1" [package.extras] requests = ["requests"] +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + [[package]] name = "packaging" version = "21.3" @@ -1469,7 +1501,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" +content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" [metadata.files] appnope = [ @@ -1898,6 +1930,14 @@ openapi-spec-validator = [ {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, ] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = 
"openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, diff --git a/pyproject.toml b/pyproject.toml index 6cea052..5070eac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,8 +23,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" +swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" -swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From f4f9cae97361709ea491be7622648b842cac1f69 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 145/214] Made yaml validation to properly separate openapi errors and python errors. Refactored swagger.py to return the swagger file directory --- pfunk/tests/test_aws.py | 46 +++++++++++++++---------- pfunk/utils/aws.py | 74 ++++++++++++++++++++++++++++++++++------- pfunk/utils/swagger.py | 12 +++---- 3 files changed, 96 insertions(+), 36 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index a6b1314..6ec5841 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,6 @@ +import os import unittest +import tempfile from unittest import mock from pfunk.utils.aws import ApiGateway @@ -9,34 +11,44 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpCls(cls) -> None: + def setUpClass(cls) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) - cls.api_yaml = cls.project.generate_swagger() + + swagger = cls.project.generate_swagger() + cls.swagger_dir = swagger['dir'] + cls.swagger_file = swagger['swagger_file'] + print(cls.swagger_dir) def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.api_yaml) + result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid def test_validate_wrong_yaml(self): result = self.aws_client.validate_yaml('wrong yaml...33::39') - self.assertIsNotNone(result) # if there are returned objs, there is an error + # if there are returned objs, there is an error + self.assertIsNotNone(result) @mock.patch('boto3.client') - def test_create_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() + def test_create_api_from_yaml(self, mocked): + result = self.aws_client.create_api_from_yaml( + yaml_file=self.swagger_dir) self.assertTrue(result['success']) @mock.patch('boto3.client') - def test_update_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() - self.assertTrue(result['success']) - - def test_create_api_from_wrong_yaml(self): - result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') - - def test_update_api_from_wrong_yaml(self): - result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. 
YAML is not valid.') \ No newline at end of file + def test_create_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.create_api_from_yaml(tmp.name) + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + + # @mock.patch('boto3.client') + # def test_update_api_from_yaml(self): + # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) + # self.assertTrue(result['success']) + + # def test_update_api_from_wrong_yaml(self): + # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + # self.assertEqual(result, 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 4c61506..b1c26c0 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,36 +1,86 @@ import boto3 import swaggyp as sw +# from botocore.exceptions import BadReq +from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError class ApiGateway(object): + region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway') - pass + self.client = boto3.client('apigateway', region_name=self.region_name) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" try: spec_dict, spec_url = read_from_filename(yaml_file) validate_v2_spec(spec_dict) + except (OSError, AttributeError) as err: + return {'errors': str(err)} except OpenAPIValidationError as err: + return self._iterate_validator_errors(spec_dict) + return None + + def _iterate_validator_errors(self, spec_dict): + """ Iterates through list of errors that the `openapi_spec_validator` returned + + This method was implemented due to `openapi_spec_validator` design + that if an error happened while iterating through the YAML file + it returns a Python error. + + Args: + spec_dict (dict, required): + `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` + Returns: + list of errors + """ + try: errors = [{err.message: err.json_path} for err in openapi_v2_spec_validator.iter_errors(spec_dict)] return errors - return None + except (OSError, AttributeError) as err: + return str(err) - def create_api_from_yaml(self, yaml_file): - # response = client.import_rest_api( - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): + """ Creates an API for AWS API Gateway from a YAML swagger file + + Args: + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. 
YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=file + ) + else: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file + ) + + if response: + return { + 'success': True, + response: response + } + # TODO: Specify boto exceptions + except Exception as err: + return err def update_api_from_yaml(self, yaml_file): # response = client.put_rest_api( diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 5367db1..8d72eb7 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -132,16 +132,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}swagger.yaml'): - with open(f'{dir}swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}/swagger.yaml'): + with open(f'{dir}/swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: - print( - 'There is an existing swagger file. Kindly move/delete it to generate a new one.') + print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}swagger.yaml', + "dir": f'{dir}/swagger.yaml', "swagger_file": t.to_yaml() } @@ -320,4 +318,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) + return self.write_to_yaml(dir) \ No newline at end of file From 49d45f1876b79daf340ad818654da8bc14afeaef Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:55:52 +0800 Subject: [PATCH 146/214] Refactored default dir of swagger file. Refactored unit tests for AWS utils --- pfunk/tests/test_aws.py | 1 - pfunk/utils/swagger.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 6ec5841..571f3c1 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -19,7 +19,6 @@ def setUpClass(cls) -> None: swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] - print(cls.swagger_dir) def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 8d72eb7..c27184e 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -132,14 +132,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}/swagger.yaml'): - with open(f'{dir}/swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}swagger.yaml'): + with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}/swagger.yaml', + "dir": f'{dir}swagger.yaml', "swagger_file": t.to_yaml() } From 3d7fb493d2be5e7aa9aa677aaea6cb468e8b5750 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 147/214] Finished create/update api from yaml. Added writing to config file if API is created. 
Added reading from config file if API is to be updated --- pfunk/tests/test_aws.py | 23 +++++--- pfunk/utils/aws.py | 128 ++++++++++++++++++++++++++++++++-------- 2 files changed, 119 insertions(+), 32 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 571f3c1..2789dc2 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -11,7 +11,8 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpClass(cls) -> None: + @mock.patch('boto3.client') + def setUpClass(cls, mocked) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) @@ -43,11 +44,17 @@ def test_create_api_from_wrong_yaml(self, mocked): result = self.aws_client.create_api_from_yaml(tmp.name) self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') - # @mock.patch('boto3.client') - # def test_update_api_from_yaml(self): - # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) - # self.assertTrue(result['success']) + @mock.patch('boto3.client') + def test_update_api_from_yaml(self, mocked): + result = self.aws_client.update_api_from_yaml( + yaml_file=self.swagger_dir, mode='merge') + print(result) + self.assertTrue(result['success']) - # def test_update_api_from_wrong_yaml(self): - # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - # self.assertEqual(result, 'Bad Request. YAML is not valid.') + @mock.patch('boto3.client') + def test_update_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index b1c26c0..7427db3 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,4 +1,6 @@ +import datetime import boto3 +import json import swaggyp as sw # from botocore.exceptions import BadReq from envs import env @@ -6,12 +8,54 @@ from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError +AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') +AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') + + +def _json_dt_helper(o): + """ Helps serializing `datetime` objects to a readable string """ + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + + +def write_to_config(obj, config_file_dir='pfunk.json'): + """ Writes to pfunk config file + + Args: + obj (dict, required): + key, value pairs to write to json file + config_file_dir (str, optional): + directory of the config json file, default='pfunk.json' + Returns: + config_file (dict, required): + the current value of config file (pfunk.json) + """ + with open(config_file_dir, 'r+') as f: + data = json.load(f) + data.update(obj) + f.seek(0) + f.truncate() + json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) + return data + + +def read_from_config_file(config_file_dir='pfunk.json'): + """ Returns data from config file in dict form """ + with open(config_file_dir, 'r') as f: + data = json.load(f) + return data + class ApiGateway(object): region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway', region_name=self.region_name) + self.client = boto3.client( + 'apigateway', + aws_access_key_id=AWS_ACCESS_KEY, + 
aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name=AWS_DEFAULT_REGION) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" @@ -63,33 +107,69 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): try: if not type(yaml_file) == 'string': with open(yaml_file, 'r') as file: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=file - ) - else: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + yaml_file = file.read() + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file) + + # TODO: Fix -- if using mocked obj, don't write anything + if response: + write_to_config({'api': response}) + return { + 'success': True, + 'response': response + } + # TODO: Specify boto exceptions + except Exception as err: + return { + 'error': str(err) + } + + def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): + """ Updates rest API using yaml file + + Args: + rest_api_id (string, required): + ID of the API for updating, if not provided, use API ID from `pfunk.json` + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + mode (string, required): + Mode of update, choice=['merge', 'overwrite'] + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + yaml_file = file.read() + # Acquire REST API ID from config file if not provided + if not rest_api_id: + data = read_from_config_file() + if data.get('api'): + rest_api_id = (data.get('api') + .get('id')) + + response = self.client.put_rest_api( + restApiId=rest_api_id, + mode=mode, + failOnWarnings=fail_on_warnings, + body=yaml_file + ) if response: return { 'success': True, - response: response + 'response': response } # TODO: Specify boto exceptions except Exception as err: - return err - - def update_api_from_yaml(self, yaml_file): - # response = client.put_rest_api( - # restApiId='string', - # mode='merge'|'overwrite', - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + return { + 'error': str(err) + } From 49de24943821593cf7596c13b11b77f5dfed13c8 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 13 Apr 2022 13:44:30 +0800 Subject: [PATCH 148/214] Reworked what exceptions does aws util react to. Added doc for aws util tests --- pfunk/tests/test_aws.py | 10 +++++++++- pfunk/utils/aws.py | 15 ++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 2789dc2..d28c852 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -9,6 +9,15 @@ class ApiGatewayTests(unittest.TestCase): + """ Unit tests for creation of API from Swagger file + + Note that the unittests uses mocked boto3 normally. If + you want to test against a real endpoint, remove the + patch decorator at `setUpClass` and the `mocked` + param. Also make sure you have the required + env vars for AWS credentials and you have + the json config in the current env. 
+ """ @classmethod @mock.patch('boto3.client') @@ -48,7 +57,6 @@ def test_create_api_from_wrong_yaml(self, mocked): def test_update_api_from_yaml(self, mocked): result = self.aws_client.update_api_from_yaml( yaml_file=self.swagger_dir, mode='merge') - print(result) self.assertTrue(result['success']) @mock.patch('boto3.client') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7427db3..13164f8 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -2,7 +2,7 @@ import boto3 import json import swaggyp as sw -# from botocore.exceptions import BadReq +from botocore.exceptions import ClientError, NoCredentialsError from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename @@ -20,7 +20,7 @@ def _json_dt_helper(o): def write_to_config(obj, config_file_dir='pfunk.json'): - """ Writes to pfunk config file + """ Appends object to pfunk config file Args: obj (dict, required): @@ -111,16 +111,14 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - - # TODO: Fix -- if using mocked obj, don't write anything + if response: write_to_config({'api': response}) return { 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } @@ -154,7 +152,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin data = read_from_config_file() if data.get('api'): rest_api_id = (data.get('api') - .get('id')) + .get('id')) response = self.client.put_rest_api( restApiId=rest_api_id, @@ -168,8 +166,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } From 199fb26e911df4f24033e32d4b2b8025ea6621bc Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 27 Apr 2022 14:41:51 +0800 Subject: [PATCH 149/214] Did cleaning up of swaggerdoc docstrings --- pfunk/utils/swagger.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index c27184e..2756f21 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -64,9 +64,6 @@ def __init__(self, collections, rules=[], config_file='pfunk.json'): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given collection doesn't have - config_file (str, optional): - directory of the config_file - Returns: swagger.yaml (yaml, required): Generated YAML file From b826a9b06c5f6fbbf6659f86aaee4728cc2c668e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 4 May 2022 14:52:41 +0800 Subject: [PATCH 150/214] added generate-swagger as a cli command, creates a swagger file from the provided json config file of a project --- pfunk/utils/swagger.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2756f21..a765760 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -64,6 +64,9 @@ def __init__(self, collections, rules=[], config_file='pfunk.json'): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given 
collection doesn't have + config_file (str, optional): + directory of the config_file + Returns: swagger.yaml (yaml, required): Generated YAML file @@ -133,7 +136,8 @@ def write_to_yaml(self, dir=''): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + print( + 'There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { "dir": f'{dir}swagger.yaml', @@ -315,4 +319,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) \ No newline at end of file + return self.write_to_yaml(dir) From 741988f0200d4051e9a08f14501f902f8324e817 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 151/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 36 +++++++++++++++--------------- pfunk/utils/swagger.py | 44 +++++++++++++++++-------------------- 2 files changed, 38 insertions(+), 42 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 2665808..c47a132 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a765760..abbe8fc 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -211,42 +211,38 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's `_payload_docs` - view_payload = view(col)._payload_docs() + # Acquire payload of the view from the View's docstring + # where to cut the docstrings to use the definition for the payload of the view + oas_str_split = '[OAS]\n' + view_docs = view.__doc__ + view_payload = None + if (view_docs and 
len(view_docs.split(oas_str_split)) > 1): + view_payload = view_docs.split(oas_str_split)[1] # Construct payload for swagger generation if view_payload: - for field in view_payload.get('data'): - if field.get('schema'): - schema = sw.SwagSchema(ref=field.get('schema')) - param = sw.Parameter( - name=field.get('name'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - schema=schema - ) - else: - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + for field in json.loads(view_payload).get('data'): + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) + docs_description = view_docs if not len(view_docs.split( + oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = ['application/json', 'application/x-www-form-urlencoded'] - view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view_docs, + description=docs_description, responses=responses, consumes=consumes, produces=produces, @@ -255,7 +251,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view_docs, + description=docs_description, responses=responses, consumes=consumes, produces=produces) From 8afe87d5a4119e83f0ff32bff11250e0b8f1214d Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 152/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 20 ++++++++---------- pfunk/web/views/json.py | 45 +++++++++++++---------------------------- 2 files changed, 22 insertions(+), 43 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index abbe8fc..049ebe4 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -211,17 +212,13 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's docstring - # where to cut the docstrings to use the definition for the payload of the view - oas_str_split = '[OAS]\n' - view_docs = view.__doc__ - view_payload = None - if (view_docs and len(view_docs.split(oas_str_split)) > 1): - view_payload = view_docs.split(oas_str_split)[1] + # Acquire payload of the view from the View's `_payload_docs` + view_payload = view()._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: - for field in json.loads(view_payload).get('data'): + for field in view_payload.get('data'): param = sw.Parameter( name=field.get('name'), _type=field.get('type'), @@ -232,17 +229,16 @@ def get_operations(self, col: Collection): ) params.append(param) - docs_description = view_docs if not len(view_docs.split( - oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = 
['application/json', 'application/x-www-form-urlencoded'] + view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces, @@ -251,7 +247,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 649f08c..d721de2 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,42 +27,25 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view in Swagger generation. + """ Used in defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter. - If there is an error in the swagger, it will not be raised. - Usage of `https://editor.swagger.io` to validate is recommended - e.g. - ``` - # Defining formdata - {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": true, - "type": "string" - }, - { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" - } - ]} - - # Defining a payload that references a model + Should return a dict that has the fields of a swagger parameter e.g. {"data": [ { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" } ]} - ``` """ return {} From 3f7dbd7e4d70d36645200dd6789bee3146677449 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 153/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ pfunk/utils/swagger.py | 29 ++++++++++++++++++++--------- pfunk/web/views/json.py | 14 +++++++------- 3 files changed, 45 insertions(+), 34 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index c47a132..2665808 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) - - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) - - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no 
duplicates are there + self.assertEqual(self.project.collections, set([Person])) + + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) + + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 049ebe4..592b6c0 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,17 +216,28 @@ def get_operations(self, col: Collection): view_payload = view()._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + if field.get('schema'): + schema = sw.SwagSchema( + ref=field.get('schema') + ) + param = sw.Parameter( + name=field.get('name'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + schema=schema + ) + else: + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) consumes = ['application/json', diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index d721de2..cd6e203 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,7 +27,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in defining payload parameters for the view. + """ Used in custom defining payload parameters for the view. Should return a dict that has the fields of a swagger parameter e.g. 
{"data": [ @@ -35,15 +35,15 @@ def _payload_docs(self): "name":"name", "in":"formData", "description":"name of the pet", - "required": true, + "required": True, "type": "string" }, { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" } ]} """ From 11b3bbb58388ba916d3c2ef8178778052ec3c8a6 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:08 +0800 Subject: [PATCH 154/214] Updated the docstrings of _payload_docs --- pfunk/utils/swagger.py | 4 +-- pfunk/web/views/json.py | 60 ++++++++++++++++++++++++++++------------- 2 files changed, 42 insertions(+), 22 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 592b6c0..a2fbc2e 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -219,9 +219,7 @@ def get_operations(self, col: Collection): if view_payload: for field in view_payload.get('data'): if field.get('schema'): - schema = sw.SwagSchema( - ref=field.get('schema') - ) + schema = sw.SwagSchema(ref=field.get('schema')) param = sw.Parameter( name=field.get('name'), _in=field.get('in'), diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index cd6e203..8290b8f 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -29,15 +29,31 @@ def get_response(self): def _payload_docs(self): """ Used in custom defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter e.g. + Should return a dict that has the fields of a swagger parameter. + If there is an error in the swagger, it will not be raised. + Usage of `https://editor.swagger.io` to validate is recommended + e.g. 
+ ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": True, - "type": "string" - }, { "name": "body", "in": "body", @@ -46,6 +62,7 @@ def _payload_docs(self): "schema": "#/definitions/Person" } ]} + ``` """ return {} @@ -88,17 +105,22 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # Reference the collection by default - if self.collection: - return {"data": [ - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": f"#/definitions/{self.collection.__class__.__name__}" - } - ]} + return {"data": [ + { + "name": "name", + "in": "formData", + "description": "name of the pet", + "required": True, + "type": "string" + }, + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} class UpdateView(UpdateMixin, JSONIDMixin, JSONView): From 052122659547d76c852ae618b61a37be0d4b1f65 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:41 +0800 Subject: [PATCH 155/214] Updated the docstrings of _payload_docs --- pfunk/web/views/json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 8290b8f..ac711d4 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,7 +27,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view. + """ Used in custom defining payload parameters for the view in Swagger generation. Should return a dict that has the fields of a swagger parameter. If there is an error in the swagger, it will not be raised. 
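For reference, the `_payload_docs` hook documented above is what feeds the Swagger generation: `get_operations` in `pfunk/utils/swagger.py` reads each view's `_payload_docs()` return value and, for every entry, emits either a plain `sw.Parameter` or, when a `schema` key is present, a parameter wrapping `sw.SwagSchema(ref=...)`. A minimal sketch of a custom override follows; the `BlogPostCreateView` class, the `BlogPost` definition name, and the import path are illustrative assumptions rather than code taken from these patches.

    # Hedged sketch: assumes CreateView is importable from pfunk.web.views.json
    # (the module patched above); BlogPostCreateView and BlogPost are hypothetical names.
    from pfunk.web.views.json import CreateView

    class BlogPostCreateView(CreateView):
        """Hypothetical create view with a custom Swagger payload definition."""

        def _payload_docs(self):
            # Each entry becomes a Swagger parameter in get_operations
            # (pfunk/utils/swagger.py): a 'schema' key is turned into
            # sw.SwagSchema(ref=...), anything else into a plain sw.Parameter.
            # Swagger 2.0 does not allow mixing 'body' and 'formData' parameters
            # in one operation, so this override sticks to a single body parameter.
            return {"data": [
                {
                    "name": "body",
                    "in": "body",
                    "description": "BlogPost object to add",
                    "required": True,
                    "schema": "#/definitions/BlogPost"
                }
            ]}
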
From 0950d0a0217b6dd443fa682af8899ec964e6d16e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 156/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - pfunk/web/views/json.py | 27 +++++++++++---------------- poetry.lock | 2 +- pyproject.toml | 2 +- 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a2fbc2e..eb2c57b 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index ac711d4..9dcff50 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -105,22 +105,17 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - return {"data": [ - { - "name": "name", - "in": "formData", - "description": "name of the pet", - "required": True, - "type": "string" - }, - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" - } - ]} + # TODO: Get view's collection class name by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/Collection" + } + ]} class UpdateView(UpdateMixin, JSONIDMixin, JSONView): diff --git a/poetry.lock b/poetry.lock index 26a6f60..5f60058 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1501,7 +1501,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" +content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" [metadata.files] appnope = [ diff --git a/pyproject.toml b/pyproject.toml index 5070eac..6cea052 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,8 +23,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" +swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 41cb07738fbf41e46e97696209600c9a174b81d5 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 23 Jun 2022 15:13:06 +0800 Subject: [PATCH 157/214] Fixed calling of view's _payload_docs to pass the correct argument. 
Made Create and Update views have default reference to model --- pfunk/utils/swagger.py | 2 +- pfunk/web/views/json.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index eb2c57b..a765760 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -212,7 +212,7 @@ def get_operations(self, col: Collection): params.append(path_params) # Acquire payload of the view from the View's `_payload_docs` - view_payload = view()._payload_docs() + view_payload = view(col)._payload_docs() # Construct payload for swagger generation if view_payload: diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 9dcff50..649f08c 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -105,7 +105,7 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # TODO: Get view's collection class name by default + # Reference the collection by default if self.collection: return {"data": [ { @@ -113,7 +113,7 @@ def _payload_docs(self): "in": "body", "description": "Collection object to add", "required": True, - "schema": f"#/definitions/Collection" + "schema": f"#/definitions/{self.collection.__class__.__name__}" } ]} From 377893c1504054459f7da3c3cbd57f415e3f1aab Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 7 Apr 2022 15:29:22 +0800 Subject: [PATCH 158/214] Added yaml validator and have it return specific errors --- pfunk/utils/swagger.py | 1 + poetry.lock | 40 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a765760..5367db1 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -135,6 +135,7 @@ def write_to_yaml(self, dir=''): if not os.path.exists(f'{dir}swagger.yaml'): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) + return t.to_yaml() else: print( 'There is an existing swagger file. 
Kindly move/delete it to generate a new one.') diff --git a/poetry.lock b/poetry.lock index 5f60058..9d28690 100644 --- a/poetry.lock +++ b/poetry.lock @@ -953,6 +953,38 @@ PyYAML = ">=5.1" [package.extras] requests = ["requests"] +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + [[package]] name = "packaging" version = "21.3" @@ -1938,6 +1970,14 @@ openapi-spec-validator = [ {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, ] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, From 39d50f78f148d0ff216a47afd87923fbb5fb71d5 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 159/214] Made yaml validation to properly separate openapi errors and python errors. Refactored swagger.py to return the swagger file directory --- pfunk/utils/swagger.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 5367db1..a765760 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -135,7 +135,6 @@ def write_to_yaml(self, dir=''): if not os.path.exists(f'{dir}swagger.yaml'): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: print( 'There is an existing swagger file. Kindly move/delete it to generate a new one.') From 1b10e284cf2ced716280626c864734aef72e41e8 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 160/214] Finished create/update api from yaml. Added writing to config file if API is created. 
Added reading from config file if API is to be updated --- pfunk/utils/aws.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 13164f8..28449eb 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -111,7 +111,6 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - if response: write_to_config({'api': response}) return { From 78bcffbece527b9b86926f62c3651abe74d56db0 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 161/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 2665808..c47a132 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) From 8399174aae889844ec7ea1e70885442002f6e6f5 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 162/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a765760..d3e3501 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -215,6 +216,7 @@ def get_operations(self, col: Collection): view_payload = view(col)._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): if field.get('schema'): From 
b022db5d30ae31d3936241ad6b1d7f615cb26025 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 163/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ pfunk/utils/swagger.py | 1 - 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index c47a132..2665808 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) - - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) - - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) + + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) + + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index d3e3501..ae158c2 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,7 +216,6 @@ def get_operations(self, col: Collection): view_payload = view(col)._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): if field.get('schema'): From bb0c2f5f6bd009c65bfcff897a458e7267d6f375 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 164/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index ae158c2..a765760 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods From a3eea4acc7154f3f2cdda8de23c20d0526d9037d Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:08:35 +0800 Subject: [PATCH 165/214] Added aws.py as a skeleton for upcoming AWS features --- pfunk/utils/aws.py | 191 +++++++-------------------------------------- 1 file changed, 28 insertions(+), 163 deletions(-) diff --git 
a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 28449eb..7413120 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,171 +1,36 @@ -import datetime import boto3 -import json import swaggyp as sw -from botocore.exceptions import ClientError, NoCredentialsError -from envs import env -from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator -from openapi_spec_validator.readers import read_from_filename -from openapi_spec_validator.exceptions import OpenAPIValidationError - -AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') -AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') -AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') - - -def _json_dt_helper(o): - """ Helps serializing `datetime` objects to a readable string """ - if isinstance(o, (datetime.date, datetime.datetime)): - return o.isoformat() - - -def write_to_config(obj, config_file_dir='pfunk.json'): - """ Appends object to pfunk config file - - Args: - obj (dict, required): - key, value pairs to write to json file - config_file_dir (str, optional): - directory of the config json file, default='pfunk.json' - Returns: - config_file (dict, required): - the current value of config file (pfunk.json) - """ - with open(config_file_dir, 'r+') as f: - data = json.load(f) - data.update(obj) - f.seek(0) - f.truncate() - json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) - return data - - -def read_from_config_file(config_file_dir='pfunk.json'): - """ Returns data from config file in dict form """ - with open(config_file_dir, 'r') as f: - data = json.load(f) - return data - class ApiGateway(object): - region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client( - 'apigateway', - aws_access_key_id=AWS_ACCESS_KEY, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY, - region_name=AWS_DEFAULT_REGION) - + self.client = boto3.client('apigateway') + pass + + def create_api_from_yaml(self, yaml_file): + # response = client.import_rest_api( + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + + def update_api_from_yaml(self, yaml_file): + # response = client.put_rest_api( + # restApiId='string', + # mode='merge'|'overwrite', + # failOnWarnings=True|False, + # parameters={ + # 'string': 'string' + # }, + # body=b'bytes'|file + # ) + pass + def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using OpenAPI Spec v2""" - try: - spec_dict, spec_url = read_from_filename(yaml_file) - validate_v2_spec(spec_dict) - except (OSError, AttributeError) as err: - return {'errors': str(err)} - except OpenAPIValidationError as err: - return self._iterate_validator_errors(spec_dict) - return None - - def _iterate_validator_errors(self, spec_dict): - """ Iterates through list of errors that the `openapi_spec_validator` returned - - This method was implemented due to `openapi_spec_validator` design - that if an error happened while iterating through the YAML file - it returns a Python error. 
- - Args: - spec_dict (dict, required): - `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` - Returns: - list of errors - """ - try: - errors = [{err.message: err.json_path} - for err in openapi_v2_spec_validator.iter_errors(spec_dict)] - return errors - except (OSError, AttributeError) as err: - return str(err) - - def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): - """ Creates an API for AWS API Gateway from a YAML swagger file - - Args: - yaml_file (yaml file, required): - The OpenAPI swagger file to create API from - fail_on_warnings (bool, optional): - Specifies if the method will error on warnings. Default: `True` - """ - _yaml_valid = self.validate_yaml(yaml_file) - if _yaml_valid: - return { - "error": 'Bad Request. YAML is not valid.', - "yaml_err": _yaml_valid - } - - try: - if not type(yaml_file) == 'string': - with open(yaml_file, 'r') as file: - yaml_file = file.read() - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file) - if response: - write_to_config({'api': response}) - return { - 'success': True, - 'response': response - } - except (ClientError, NoCredentialsError) as err: - return { - 'error': str(err) - } - - def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): - """ Updates rest API using yaml file - - Args: - rest_api_id (string, required): - ID of the API for updating, if not provided, use API ID from `pfunk.json` - yaml_file (yaml file, required): - The OpenAPI swagger file to create API from - mode (string, required): - Mode of update, choice=['merge', 'overwrite'] - fail_on_warnings (bool, optional): - Specifies if the method will error on warnings. Default: `True` - """ - _yaml_valid = self.validate_yaml(yaml_file) - if _yaml_valid: - return { - "error": 'Bad Request. YAML is not valid.', - "yaml_err": _yaml_valid - } - - try: - if not type(yaml_file) == 'string': - with open(yaml_file, 'r') as file: - yaml_file = file.read() - # Acquire REST API ID from config file if not provided - if not rest_api_id: - data = read_from_config_file() - if data.get('api'): - rest_api_id = (data.get('api') - .get('id')) - - response = self.client.put_rest_api( - restApiId=rest_api_id, - mode=mode, - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + """ Validate YAML file if it is valid for using """ + pass - if response: - return { - 'success': True, - 'response': response - } - except (ClientError, NoCredentialsError) as err: - return { - 'error': str(err) - } + def \ No newline at end of file From b05e1bac1c6dd56eff1cd66ffb2f7465a9da56d6 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 5 Apr 2022 15:11:41 +0800 Subject: [PATCH 166/214] Added skeleton tests for aws features --- pfunk/tests/test_aws.py | 64 ++++++----------------------------------- 1 file changed, 8 insertions(+), 56 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index d28c852..c3cdc45 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,68 +1,20 @@ -import os import unittest -import tempfile -from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group, Person, Sport +from pfunk.tests import User, Group from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - """ Unit tests for creation of API from Swagger file - - Note that the unittests uses mocked boto3 normally. 
If - you want to test against a real endpoint, remove the - patch decorator at `setUpClass` and the `mocked` - param. Also make sure you have the required - env vars for AWS credentials and you have - the json config in the current env. - """ - @classmethod - @mock.patch('boto3.client') - def setUpClass(cls, mocked) -> None: - cls.project = Project() - cls.aws_client = ApiGateway() - cls.project.add_resources([Person, Sport, Group, User]) - - swagger = cls.project.generate_swagger() - cls.swagger_dir = swagger['dir'] - cls.swagger_file = swagger['swagger_file'] + def setUp(self) -> None: + self.project = Project() def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.swagger_dir) - self.assertIsNone(result) # if there are no errors, then spec is valid - - def test_validate_wrong_yaml(self): - result = self.aws_client.validate_yaml('wrong yaml...33::39') - # if there are returned objs, there is an error - self.assertIsNotNone(result) - - @mock.patch('boto3.client') - def test_create_api_from_yaml(self, mocked): - result = self.aws_client.create_api_from_yaml( - yaml_file=self.swagger_dir) - self.assertTrue(result['success']) - - @mock.patch('boto3.client') - def test_create_api_from_wrong_yaml(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: - tmp.seek(0) - tmp.write('test wrong yaml') - result = self.aws_client.create_api_from_yaml(tmp.name) - self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + pass - @mock.patch('boto3.client') - def test_update_api_from_yaml(self, mocked): - result = self.aws_client.update_api_from_yaml( - yaml_file=self.swagger_dir, mode='merge') - self.assertTrue(result['success']) + def test_create_api_from_yaml(self): + pass - @mock.patch('boto3.client') - def test_update_api_from_wrong_yaml(self, mocked): - with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: - tmp.seek(0) - tmp.write('test wrong yaml') - result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') - self.assertEqual(result['error'], 'Bad Request. 
YAML is not valid.') + def test_update_api_from_yaml(self): + pass From da5675d8a66672099af40771f3b3e684e87e1cd3 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 7 Apr 2022 15:29:22 +0800 Subject: [PATCH 167/214] Added yaml validator and have it return specific errors --- pfunk/tests/test_aws.py | 34 +++++++++++++++++---- pfunk/utils/aws.py | 21 +++++++++---- pfunk/utils/swagger.py | 1 + poetry.lock | 65 ++++++++++++++++++++++++++++++++++++----- pyproject.toml | 2 +- 5 files changed, 103 insertions(+), 20 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index c3cdc45..a6b1314 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,20 +1,42 @@ import unittest +from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group +from pfunk.tests import User, Group, Person, Sport from pfunk.project import Project class ApiGatewayTests(unittest.TestCase): - def setUp(self) -> None: - self.project = Project() + @classmethod + def setUpCls(cls) -> None: + cls.project = Project() + cls.aws_client = ApiGateway() + cls.project.add_resources([Person, Sport, Group, User]) + cls.api_yaml = cls.project.generate_swagger() def test_validate_yaml(self): - pass + result = self.aws_client.validate_yaml(self.api_yaml) + self.assertIsNone(result) # if there are no errors, then spec is valid + def test_validate_wrong_yaml(self): + result = self.aws_client.validate_yaml('wrong yaml...33::39') + self.assertIsNotNone(result) # if there are returned objs, there is an error + + @mock.patch('boto3.client') def test_create_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + @mock.patch('boto3.client') def test_update_api_from_yaml(self): - pass + result = self.aws_client.create_api_from_yaml() + self.assertTrue(result['success']) + + def test_create_api_from_wrong_yaml(self): + result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. YAML is not valid.') + + def test_update_api_from_wrong_yaml(self): + result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + self.assertEqual(result, 'Bad Request. 
YAML is not valid.') \ No newline at end of file diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7413120..4c61506 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,5 +1,9 @@ import boto3 import swaggyp as sw +from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator +from openapi_spec_validator.readers import read_from_filename +from openapi_spec_validator.exceptions import OpenAPIValidationError + class ApiGateway(object): @@ -7,6 +11,17 @@ def __init__(self): self.client = boto3.client('apigateway') pass + def validate_yaml(self, yaml_file): + """ Validate YAML file if it is valid for using OpenAPI Spec v2""" + try: + spec_dict, spec_url = read_from_filename(yaml_file) + validate_v2_spec(spec_dict) + except OpenAPIValidationError as err: + errors = [{err.message: err.json_path} + for err in openapi_v2_spec_validator.iter_errors(spec_dict)] + return errors + return None + def create_api_from_yaml(self, yaml_file): # response = client.import_rest_api( # failOnWarnings=True|False, @@ -28,9 +43,3 @@ def update_api_from_yaml(self, yaml_file): # body=b'bytes'|file # ) pass - - def validate_yaml(self, yaml_file): - """ Validate YAML file if it is valid for using """ - pass - - def \ No newline at end of file diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a765760..5367db1 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -135,6 +135,7 @@ def write_to_yaml(self, dir=''): if not os.path.exists(f'{dir}swagger.yaml'): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) + return t.to_yaml() else: print( 'There is an existing swagger file. Kindly move/delete it to generate a new one.') diff --git a/poetry.lock b/poetry.lock index 9d28690..937aec5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -414,10 +414,10 @@ optional = false python-versions = ">=3.5" [[package]] -name = "importlib-metadata" -version = "5.0.0" -description = "Read metadata from Python packages" -category = "dev" +name = "importlib-resources" +version = "5.6.0" +description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.7" @@ -985,6 +985,38 @@ PyYAML = ">=5.1" [package.extras] requests = ["requests"] +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + [[package]] name = "packaging" version = "21.3" @@ -1512,10 +1544,21 @@ watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "4.0.3" -description = "Jupyter interactive widgets for Jupyter Notebook" +version = "3.6.0" +description = "IPython HTML widgets for Jupyter" category = "dev" optional = false +python-versions = "*" + +[package.dependencies] +notebook = ">=4.4.1" + +[[package]] +name = "zipp" +version = "3.7.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" 
+optional = false python-versions = ">=3.7" [[package]] @@ -1533,7 +1576,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" +content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" [metadata.files] appnope = [ @@ -1978,6 +2021,14 @@ openapi-spec-validator = [ {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, ] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, diff --git a/pyproject.toml b/pyproject.toml index 6cea052..5070eac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,8 +23,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" +swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" -swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From b289194f980ee2cf89f7aabb006e61f985fb1df2 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:54:06 +0800 Subject: [PATCH 168/214] Made yaml validation to properly separate openapi errors and python errors. 
Refactored swagger.py to return the swagger file directory --- pfunk/tests/test_aws.py | 46 +++++++++++++++---------- pfunk/utils/aws.py | 74 ++++++++++++++++++++++++++++++++++------- pfunk/utils/swagger.py | 12 +++---- 3 files changed, 96 insertions(+), 36 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index a6b1314..6ec5841 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,6 @@ +import os import unittest +import tempfile from unittest import mock from pfunk.utils.aws import ApiGateway @@ -9,34 +11,44 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpCls(cls) -> None: + def setUpClass(cls) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) - cls.api_yaml = cls.project.generate_swagger() + + swagger = cls.project.generate_swagger() + cls.swagger_dir = swagger['dir'] + cls.swagger_file = swagger['swagger_file'] + print(cls.swagger_dir) def test_validate_yaml(self): - result = self.aws_client.validate_yaml(self.api_yaml) + result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid def test_validate_wrong_yaml(self): result = self.aws_client.validate_yaml('wrong yaml...33::39') - self.assertIsNotNone(result) # if there are returned objs, there is an error + # if there are returned objs, there is an error + self.assertIsNotNone(result) @mock.patch('boto3.client') - def test_create_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() + def test_create_api_from_yaml(self, mocked): + result = self.aws_client.create_api_from_yaml( + yaml_file=self.swagger_dir) self.assertTrue(result['success']) @mock.patch('boto3.client') - def test_update_api_from_yaml(self): - result = self.aws_client.create_api_from_yaml() - self.assertTrue(result['success']) - - def test_create_api_from_wrong_yaml(self): - result = self.aws_client.create_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') - - def test_update_api_from_wrong_yaml(self): - result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - self.assertEqual(result, 'Bad Request. YAML is not valid.') \ No newline at end of file + def test_create_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.create_api_from_yaml(tmp.name) + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + + # @mock.patch('boto3.client') + # def test_update_api_from_yaml(self): + # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) + # self.assertTrue(result['success']) + + # def test_update_api_from_wrong_yaml(self): + # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') + # self.assertEqual(result, 'Bad Request. 
YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 4c61506..b1c26c0 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,36 +1,86 @@ import boto3 import swaggyp as sw +# from botocore.exceptions import BadReq +from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError class ApiGateway(object): + region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway') - pass + self.client = boto3.client('apigateway', region_name=self.region_name) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" try: spec_dict, spec_url = read_from_filename(yaml_file) validate_v2_spec(spec_dict) + except (OSError, AttributeError) as err: + return {'errors': str(err)} except OpenAPIValidationError as err: + return self._iterate_validator_errors(spec_dict) + return None + + def _iterate_validator_errors(self, spec_dict): + """ Iterates through list of errors that the `openapi_spec_validator` returned + + This method was implemented due to `openapi_spec_validator` design + that if an error happened while iterating through the YAML file + it returns a Python error. + + Args: + spec_dict (dict, required): + `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename` + Returns: + list of errors + """ + try: errors = [{err.message: err.json_path} for err in openapi_v2_spec_validator.iter_errors(spec_dict)] return errors - return None + except (OSError, AttributeError) as err: + return str(err) - def create_api_from_yaml(self, yaml_file): - # response = client.import_rest_api( - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): + """ Creates an API for AWS API Gateway from a YAML swagger file + + Args: + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=file + ) + else: + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file + ) + + if response: + return { + 'success': True, + response: response + } + # TODO: Specify boto exceptions + except Exception as err: + return err def update_api_from_yaml(self, yaml_file): # response = client.put_rest_api( diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 5367db1..8d72eb7 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -132,16 +132,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}swagger.yaml'): - with open(f'{dir}swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}/swagger.yaml'): + with open(f'{dir}/swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) - return t.to_yaml() else: - print( - 'There is an existing swagger file. Kindly move/delete it to generate a new one.') + print('There is an existing swagger file. 
Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}swagger.yaml', + "dir": f'{dir}/swagger.yaml', "swagger_file": t.to_yaml() } @@ -320,4 +318,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) + return self.write_to_yaml(dir) \ No newline at end of file From 2382d1d25e554dc3e60c089550f177a3b38876d8 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 11 Apr 2022 15:55:52 +0800 Subject: [PATCH 169/214] Refactored default dir of swagger file. Refactored unit tests for AWS utils --- pfunk/tests/test_aws.py | 1 - pfunk/utils/swagger.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 6ec5841..571f3c1 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -19,7 +19,6 @@ def setUpClass(cls) -> None: swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] - print(cls.swagger_dir) def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 8d72eb7..c27184e 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -132,14 +132,14 @@ def write_to_yaml(self, dir=''): schemes=schemes, definitions=self.definitions) - if not os.path.exists(f'{dir}/swagger.yaml'): - with open(f'{dir}/swagger.yaml', 'x') as swag_doc: + if not os.path.exists(f'{dir}swagger.yaml'): + with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: print('There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { - "dir": f'{dir}/swagger.yaml', + "dir": f'{dir}swagger.yaml', "swagger_file": t.to_yaml() } From ce9cdf9e8ec52b2b2ca647260635073d65ef365a Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 12 Apr 2022 16:15:50 +0800 Subject: [PATCH 170/214] Finished create/update api from yaml. Added writing to config file if API is created. Added reading from config file if API is to be updated --- pfunk/tests/test_aws.py | 23 +++++--- pfunk/utils/aws.py | 128 ++++++++++++++++++++++++++++++++-------- 2 files changed, 119 insertions(+), 32 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 571f3c1..2789dc2 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -11,7 +11,8 @@ class ApiGatewayTests(unittest.TestCase): @classmethod - def setUpClass(cls) -> None: + @mock.patch('boto3.client') + def setUpClass(cls, mocked) -> None: cls.project = Project() cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) @@ -43,11 +44,17 @@ def test_create_api_from_wrong_yaml(self, mocked): result = self.aws_client.create_api_from_yaml(tmp.name) self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') - # @mock.patch('boto3.client') - # def test_update_api_from_yaml(self): - # result = self.aws_client.update_api_from_yaml(yaml_file=self.api_yaml) - # self.assertTrue(result['success']) + @mock.patch('boto3.client') + def test_update_api_from_yaml(self, mocked): + result = self.aws_client.update_api_from_yaml( + yaml_file=self.swagger_dir, mode='merge') + print(result) + self.assertTrue(result['success']) - # def test_update_api_from_wrong_yaml(self): - # result = self.aws_client.update_api_from_yaml('wrong yaml...21320:: asdkas') - # self.assertEqual(result, 'Bad Request. 
YAML is not valid.') + @mock.patch('boto3.client') + def test_update_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index b1c26c0..7427db3 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -1,4 +1,6 @@ +import datetime import boto3 +import json import swaggyp as sw # from botocore.exceptions import BadReq from envs import env @@ -6,12 +8,54 @@ from openapi_spec_validator.readers import read_from_filename from openapi_spec_validator.exceptions import OpenAPIValidationError +AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') +AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') + + +def _json_dt_helper(o): + """ Helps serializing `datetime` objects to a readable string """ + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + + +def write_to_config(obj, config_file_dir='pfunk.json'): + """ Writes to pfunk config file + + Args: + obj (dict, required): + key, value pairs to write to json file + config_file_dir (str, optional): + directory of the config json file, default='pfunk.json' + Returns: + config_file (dict, required): + the current value of config file (pfunk.json) + """ + with open(config_file_dir, 'r+') as f: + data = json.load(f) + data.update(obj) + f.seek(0) + f.truncate() + json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper) + return data + + +def read_from_config_file(config_file_dir='pfunk.json'): + """ Returns data from config file in dict form """ + with open(config_file_dir, 'r') as f: + data = json.load(f) + return data + class ApiGateway(object): region_name = env('SES_REGION_NAME', 'us-east-1') def __init__(self): - self.client = boto3.client('apigateway', region_name=self.region_name) + self.client = boto3.client( + 'apigateway', + aws_access_key_id=AWS_ACCESS_KEY, + aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name=AWS_DEFAULT_REGION) def validate_yaml(self, yaml_file): """ Validate YAML file if it is valid for using OpenAPI Spec v2""" @@ -63,33 +107,69 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): try: if not type(yaml_file) == 'string': with open(yaml_file, 'r') as file: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=file - ) - else: - response = self.client.import_rest_api( - failOnWarnings=fail_on_warnings, - body=yaml_file - ) + yaml_file = file.read() + response = self.client.import_rest_api( + failOnWarnings=fail_on_warnings, + body=yaml_file) + + # TODO: Fix -- if using mocked obj, don't write anything + if response: + write_to_config({'api': response}) + return { + 'success': True, + 'response': response + } + # TODO: Specify boto exceptions + except Exception as err: + return { + 'error': str(err) + } + + def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True): + """ Updates rest API using yaml file + + Args: + rest_api_id (string, required): + ID of the API for updating, if not provided, use API ID from `pfunk.json` + yaml_file (yaml file, required): + The OpenAPI swagger file to create API from + mode (string, required): + Mode of update, choice=['merge', 'overwrite'] + fail_on_warnings (bool, optional): + Specifies if the method will error on warnings. 
Default: `True` + """ + _yaml_valid = self.validate_yaml(yaml_file) + if _yaml_valid: + return { + "error": 'Bad Request. YAML is not valid.', + "yaml_err": _yaml_valid + } + + try: + if not type(yaml_file) == 'string': + with open(yaml_file, 'r') as file: + yaml_file = file.read() + # Acquire REST API ID from config file if not provided + if not rest_api_id: + data = read_from_config_file() + if data.get('api'): + rest_api_id = (data.get('api') + .get('id')) + + response = self.client.put_rest_api( + restApiId=rest_api_id, + mode=mode, + failOnWarnings=fail_on_warnings, + body=yaml_file + ) if response: return { 'success': True, - response: response + 'response': response } # TODO: Specify boto exceptions except Exception as err: - return err - - def update_api_from_yaml(self, yaml_file): - # response = client.put_rest_api( - # restApiId='string', - # mode='merge'|'overwrite', - # failOnWarnings=True|False, - # parameters={ - # 'string': 'string' - # }, - # body=b'bytes'|file - # ) - pass + return { + 'error': str(err) + } From 637948974f98d65c240ddd2c2fd3f2f0c20d94d4 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 13 Apr 2022 13:44:30 +0800 Subject: [PATCH 171/214] Reworked what exceptions does aws util react to. Added doc for aws util tests --- pfunk/tests/test_aws.py | 10 +++++++++- pfunk/utils/aws.py | 15 ++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index 2789dc2..d28c852 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -9,6 +9,15 @@ class ApiGatewayTests(unittest.TestCase): + """ Unit tests for creation of API from Swagger file + + Note that the unittests uses mocked boto3 normally. If + you want to test against a real endpoint, remove the + patch decorator at `setUpClass` and the `mocked` + param. Also make sure you have the required + env vars for AWS credentials and you have + the json config in the current env. 
+ """ @classmethod @mock.patch('boto3.client') @@ -48,7 +57,6 @@ def test_create_api_from_wrong_yaml(self, mocked): def test_update_api_from_yaml(self, mocked): result = self.aws_client.update_api_from_yaml( yaml_file=self.swagger_dir, mode='merge') - print(result) self.assertTrue(result['success']) @mock.patch('boto3.client') diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py index 7427db3..13164f8 100644 --- a/pfunk/utils/aws.py +++ b/pfunk/utils/aws.py @@ -2,7 +2,7 @@ import boto3 import json import swaggyp as sw -# from botocore.exceptions import BadReq +from botocore.exceptions import ClientError, NoCredentialsError from envs import env from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator from openapi_spec_validator.readers import read_from_filename @@ -20,7 +20,7 @@ def _json_dt_helper(o): def write_to_config(obj, config_file_dir='pfunk.json'): - """ Writes to pfunk config file + """ Appends object to pfunk config file Args: obj (dict, required): @@ -111,16 +111,14 @@ def create_api_from_yaml(self, yaml_file, fail_on_warnings=True): response = self.client.import_rest_api( failOnWarnings=fail_on_warnings, body=yaml_file) - - # TODO: Fix -- if using mocked obj, don't write anything + if response: write_to_config({'api': response}) return { 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } @@ -154,7 +152,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin data = read_from_config_file() if data.get('api'): rest_api_id = (data.get('api') - .get('id')) + .get('id')) response = self.client.put_rest_api( restApiId=rest_api_id, @@ -168,8 +166,7 @@ def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnin 'success': True, 'response': response } - # TODO: Specify boto exceptions - except Exception as err: + except (ClientError, NoCredentialsError) as err: return { 'error': str(err) } From ef74277d19d1d9e03cf63c59f055673d7d743b20 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 27 Apr 2022 14:41:51 +0800 Subject: [PATCH 172/214] Did cleaning up of swaggerdoc docstrings --- pfunk/utils/swagger.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index c27184e..2756f21 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -64,9 +64,6 @@ def __init__(self, collections, rules=[], config_file='pfunk.json'): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given collection doesn't have - config_file (str, optional): - directory of the config_file - Returns: swagger.yaml (yaml, required): Generated YAML file From 4e5f530513ca35612ddf76da62180701f0ceace3 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 4 May 2022 14:52:41 +0800 Subject: [PATCH 173/214] added generate-swagger as a cli command, creates a swagger file from the provided json config file of a project --- pfunk/utils/swagger.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 2756f21..a765760 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -64,6 +64,9 @@ def __init__(self, collections, rules=[], config_file='pfunk.json'): array of collection of the project to generate models from rules ([`werkzeug.routing.Rule`]): array of additional URLs that the given 
collection doesn't have + config_file (str, optional): + directory of the config_file + Returns: swagger.yaml (yaml, required): Generated YAML file @@ -133,7 +136,8 @@ def write_to_yaml(self, dir=''): with open(f'{dir}swagger.yaml', 'x') as swag_doc: swag_doc.write(t.to_yaml()) else: - print('There is an existing swagger file. Kindly move/delete it to generate a new one.') + print( + 'There is an existing swagger file. Kindly move/delete it to generate a new one.') # print(t.to_yaml()) return { "dir": f'{dir}swagger.yaml', @@ -315,4 +319,4 @@ def generate_swagger(self, dir=''): col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml(dir) \ No newline at end of file + return self.write_to_yaml(dir) From 864242b3235451b76ba78012b0a402c286978d2e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:11:30 +0800 Subject: [PATCH 174/214] added support for splitting the docs to get params of the view --- pfunk/tests/test_project.py | 36 +++++++++++++++--------------- pfunk/utils/swagger.py | 44 +++++++++++++++++-------------------- 2 files changed, 38 insertions(+), 42 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 2665808..c47a132 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - def test_add_resource(self): - self.project.add_resource(Person) - self.project.add_resource(Person) - # Test that no duplicates are there - self.assertEqual(self.project.collections, set([Person])) - - def test_add_resources(self): - self.project.add_resources([Person, Sport]) - self.assertEqual(self.project.collections, set([Person, Sport])) - - def test_render(self): - self.project.add_resources([Person, Sport]) - gql = self.project.render() - self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) - self.assertTrue('enum gender_pronouns' in gql) - self.assertTrue('type Person' in gql) - self.assertTrue('type Sport' in gql) - self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + # def test_add_resource(self): + # self.project.add_resource(Person) + # self.project.add_resource(Person) + # # Test that no duplicates are there + # self.assertEqual(self.project.collections, set([Person])) + + # def test_add_resources(self): + # self.project.add_resources([Person, Sport]) + # self.assertEqual(self.project.collections, set([Person, Sport])) + + # def test_render(self): + # self.project.add_resources([Person, Sport]) + # gql = self.project.render() + # self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + # self.assertTrue('enum gender_pronouns' in gql) + # self.assertTrue('type Person' in gql) + # self.assertTrue('type Sport' in gql) + # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a765760..abbe8fc 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -211,42 +211,38 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's `_payload_docs` - view_payload = view(col)._payload_docs() + # Acquire payload of the view from the View's docstring + # where to cut the docstrings to use the definition for the payload of the view + oas_str_split = '[OAS]\n' + view_docs = view.__doc__ + view_payload = None + if (view_docs and 
len(view_docs.split(oas_str_split)) > 1): + view_payload = view_docs.split(oas_str_split)[1] # Construct payload for swagger generation if view_payload: - for field in view_payload.get('data'): - if field.get('schema'): - schema = sw.SwagSchema(ref=field.get('schema')) - param = sw.Parameter( - name=field.get('name'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - schema=schema - ) - else: - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + for field in json.loads(view_payload).get('data'): + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) + docs_description = view_docs if not len(view_docs.split( + oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = ['application/json', 'application/x-www-form-urlencoded'] - view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view_docs, + description=docs_description, responses=responses, consumes=consumes, produces=produces, @@ -255,7 +251,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view_docs, + description=docs_description, responses=responses, consumes=consumes, produces=produces) From 1c1cb92461672d59196183a73cb370a96b8ede68 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 2 Jun 2022 15:30:35 +0800 Subject: [PATCH 175/214] switch acquiring of swagger docs for payload from docstrings to function return instead --- pfunk/utils/swagger.py | 20 +++++++---------- pfunk/web/views/json.py | 50 ++++++++++++++++------------------------- 2 files changed, 27 insertions(+), 43 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index abbe8fc..049ebe4 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,6 +163,7 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + v = view() route = view.url(col) rule = route.rule methods = route.methods @@ -211,17 +212,13 @@ def get_operations(self, col: Collection): ) params.append(path_params) - # Acquire payload of the view from the View's docstring - # where to cut the docstrings to use the definition for the payload of the view - oas_str_split = '[OAS]\n' - view_docs = view.__doc__ - view_payload = None - if (view_docs and len(view_docs.split(oas_str_split)) > 1): - view_payload = view_docs.split(oas_str_split)[1] + # Acquire payload of the view from the View's `_payload_docs` + view_payload = view()._payload_docs() # Construct payload for swagger generation + # TODO: support referencing of models if view_payload: - for field in json.loads(view_payload).get('data'): + for field in view_payload.get('data'): param = sw.Parameter( name=field.get('name'), _type=field.get('type'), @@ -232,17 +229,16 @@ def get_operations(self, col: Collection): ) params.append(param) - docs_description = view_docs if not len(view_docs.split( - oas_str_split)) > 1 else view_docs.split(oas_str_split)[0] consumes = ['application/json', 'application/x-www-form-urlencoded'] produces = 
['application/json', 'application/x-www-form-urlencoded'] + view_docs = view.__doc__ if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces, @@ -251,7 +247,7 @@ def get_operations(self, col: Collection): op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=docs_description, + description=view_docs, responses=responses, consumes=consumes, produces=produces) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 649f08c..df6672a 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,46 +27,30 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view in Swagger generation. + """ Used in defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter. - If there is an error in the swagger, it will not be raised. - Usage of `https://editor.swagger.io` to validate is recommended - e.g. - ``` - # Defining formdata - {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": true, - "type": "string" - }, - { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" - } - ]} - - # Defining a payload that references a model + Should return a dict that has the fields of a swagger parameter e.g. {"data": [ { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" } ]} - ``` """ return {} + class CreateView(UpdateMixin, JSONActionMixin, JSONView): """ Define a `Create` view that allows `creation` of an entity in the collection """ action = 'create' @@ -169,3 +153,7 @@ class ListView(QuerysetMixin, JSONActionMixin, JSONView): restrict_content_type = False action = 'list' login_required = True + + +class GraphQLView(HTTPView): + pass From 78284bb8a7da6214f0fb06a7cdec64892f33108a Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:09:34 +0800 Subject: [PATCH 176/214] added ability to also detect model references from _payload_docs function --- pfunk/tests/test_project.py | 36 ++++++++++++++++++------------------ pfunk/utils/swagger.py | 29 ++++++++++++++++++++--------- pfunk/web/views/json.py | 14 +++++++------- 3 files changed, 45 insertions(+), 34 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index c47a132..2665808 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -34,24 +34,24 @@ def tearDown(self) -> None: except FileNotFoundError: pass - # def test_add_resource(self): - # self.project.add_resource(Person) - # self.project.add_resource(Person) - # # Test that no duplicates are there - # self.assertEqual(self.project.collections, set([Person])) - - # def test_add_resources(self): - # self.project.add_resources([Person, Sport]) - # self.assertEqual(self.project.collections, set([Person, Sport])) - - # def test_render(self): - # self.project.add_resources([Person, Sport]) - # gql = self.project.render() - # self.assertEqual(self.project.enums, 
set([GENDER_PRONOUN])) - # self.assertTrue('enum gender_pronouns' in gql) - # self.assertTrue('type Person' in gql) - # self.assertTrue('type Sport' in gql) - # self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) + def test_add_resource(self): + self.project.add_resource(Person) + self.project.add_resource(Person) + # Test that no duplicates are there + self.assertEqual(self.project.collections, set([Person])) + + def test_add_resources(self): + self.project.add_resources([Person, Sport]) + self.assertEqual(self.project.collections, set([Person, Sport])) + + def test_render(self): + self.project.add_resources([Person, Sport]) + gql = self.project.render() + self.assertEqual(self.project.enums, set([GENDER_PRONOUN])) + self.assertTrue('enum gender_pronouns' in gql) + self.assertTrue('type Person' in gql) + self.assertTrue('type Sport' in gql) + self.assertTrue('allPeople: [Person] @index(name: "all_people")' in gql) def test_swagger(self): self.project.add_resources([Person, Sport, Group, User]) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 049ebe4..592b6c0 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -216,17 +216,28 @@ def get_operations(self, col: Collection): view_payload = view()._payload_docs() # Construct payload for swagger generation - # TODO: support referencing of models if view_payload: for field in view_payload.get('data'): - param = sw.Parameter( - name=field.get('name'), - _type=field.get('type'), - _in=field.get('in'), - description=field.get('description'), - required=field.get('required'), - allowEmptyValue=False - ) + if field.get('schema'): + schema = sw.SwagSchema( + ref=field.get('schema') + ) + param = sw.Parameter( + name=field.get('name'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + schema=schema + ) + else: + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) params.append(param) consumes = ['application/json', diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index df6672a..a2febd4 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,7 +27,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in defining payload parameters for the view. + """ Used in custom defining payload parameters for the view. Should return a dict that has the fields of a swagger parameter e.g. 
{"data": [ @@ -35,15 +35,15 @@ def _payload_docs(self): "name":"name", "in":"formData", "description":"name of the pet", - "required": true, + "required": True, "type": "string" }, { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" } ]} """ From cb0f05b7319b19561b34bf6e960b8c60cf6f516a Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:08 +0800 Subject: [PATCH 177/214] Updated the docstrings of _payload_docs --- pfunk/utils/swagger.py | 4 +-- pfunk/web/views/json.py | 61 ++++++++++++++++++++++++++++------------- 2 files changed, 43 insertions(+), 22 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 592b6c0..a2fbc2e 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -219,9 +219,7 @@ def get_operations(self, col: Collection): if view_payload: for field in view_payload.get('data'): if field.get('schema'): - schema = sw.SwagSchema( - ref=field.get('schema') - ) + schema = sw.SwagSchema(ref=field.get('schema')) param = sw.Parameter( name=field.get('name'), _in=field.get('in'), diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index a2febd4..73981d5 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -29,15 +29,31 @@ def get_response(self): def _payload_docs(self): """ Used in custom defining payload parameters for the view. - Should return a dict that has the fields of a swagger parameter e.g. + Should return a dict that has the fields of a swagger parameter. + If there is an error in the swagger, it will not be raised. + Usage of `https://editor.swagger.io` to validate is recommended + e.g. 
+ ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": True, - "type": "string" - }, { "name": "body", "in": "body", @@ -46,6 +62,7 @@ def _payload_docs(self): "schema": "#/definitions/Person" } ]} + ``` """ return {} @@ -89,17 +106,23 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # Reference the collection by default - if self.collection: - return {"data": [ - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": f"#/definitions/{self.collection.__class__.__name__}" - } - ]} + return {"data": [ + { + "name": "name", + "in": "formData", + "description": "name of the pet", + "required": True, + "type": "string" + }, + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} + class UpdateView(UpdateMixin, JSONIDMixin, JSONView): From 1056368345fa620da8ccdf0665e6dee9927c334b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 3 Jun 2022 15:18:41 +0800 Subject: [PATCH 178/214] Updated the docstrings of _payload_docs --- pfunk/web/views/json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 73981d5..36d8082 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -27,7 +27,7 @@ def get_response(self): ) def _payload_docs(self): - """ Used in custom defining payload parameters for the view. + """ Used in custom defining payload parameters for the view in Swagger generation. Should return a dict that has the fields of a swagger parameter. If there is an error in the swagger, it will not be raised. 
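
The patches above define the `_payload_docs` contract: a view returns a plain dict whose "data" list describes its Swagger parameters, and the generator in pfunk/utils/swagger.py turns each entry into an sw.Parameter, building an sw.SwagSchema reference whenever a "schema" key is present (PATCH 176). A minimal sketch of a project-level override is shown below; it is illustrative only, and the ArticleCreateView class and its title/published fields are assumptions that do not appear anywhere in this patch series.

    from pfunk.web.views.json import CreateView


    class ArticleCreateView(CreateView):
        """Create view that documents its request payload for Swagger generation."""

        def _payload_docs(self):
            # Same dict shape as the examples in the _payload_docs docstring:
            # each entry becomes one Swagger parameter on this view's operation.
            return {"data": [
                {
                    "name": "title",
                    "in": "formData",
                    "description": "title of the article",
                    "required": True,
                    "type": "string"
                },
                {
                    "name": "published",
                    "in": "formData",
                    "description": "whether the article is publicly visible",
                    "required": False,
                    "type": "boolean"
                }
            ]}

When the whole request body is a collection object, returning a single "body" parameter whose "schema" points at "#/definitions/<CollectionName>" (as the CreateView in these patches does by default) keeps the generated document referencing the model definitions instead of repeating every field.
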
From 5c4a0fda3fc2f75c520d960d9ccdd1c9e3184fc3 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 22 Jun 2022 15:19:06 +0800 Subject: [PATCH 179/214] updated swaggyp package --- pfunk/utils/swagger.py | 1 - pfunk/web/views/json.py | 27 +++++++++++---------------- poetry.lock | 2 +- pyproject.toml | 2 +- 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a2fbc2e..eb2c57b 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -163,7 +163,6 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: - v = view() route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 36d8082..1d13cb4 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -106,22 +106,17 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - return {"data": [ - { - "name": "name", - "in": "formData", - "description": "name of the pet", - "required": True, - "type": "string" - }, - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" - } - ]} + # TODO: Get view's collection class name by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/Collection" + } + ]} diff --git a/poetry.lock b/poetry.lock index 937aec5..43b717f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1576,7 +1576,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6b06fa36eba0e2e7e8d19a1efecff127af145281ccb3d10c02e1baebb062d147" +content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" [metadata.files] appnope = [ diff --git a/pyproject.toml b/pyproject.toml index 5070eac..6cea052 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,8 +23,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.2.0" openapi-spec-validator = "^0.4.0" +swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 847ea2e091f085118d238cd190bee2757f286d87 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 23 Jun 2022 15:13:06 +0800 Subject: [PATCH 180/214] Fixed calling of view's _payload_docs to pass the correct argument. 
Made Create and Update views have default reference to model --- pfunk/utils/swagger.py | 2 +- pfunk/web/views/json.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index eb2c57b..a765760 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -212,7 +212,7 @@ def get_operations(self, col: Collection): params.append(path_params) # Acquire payload of the view from the View's `_payload_docs` - view_payload = view()._payload_docs() + view_payload = view(col)._payload_docs() # Construct payload for swagger generation if view_payload: diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 1d13cb4..c679b01 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -106,7 +106,7 @@ def get_m2m_kwargs(self, obj): ) def _payload_docs(self): - # TODO: Get view's collection class name by default + # Reference the collection by default if self.collection: return {"data": [ { @@ -114,7 +114,7 @@ def _payload_docs(self): "in": "body", "description": "Collection object to add", "required": True, - "schema": f"#/definitions/Collection" + "schema": f"#/definitions/{self.collection.__class__.__name__}" } ]} @@ -148,6 +148,7 @@ def _payload_docs(self): ]} + class DetailView(ObjectMixin, JSONIDMixin, JSONView): """ Define a view to allow single entity operations """ action = 'detail' From aa85d73bdd327af716c44169d5cd9bcb83a16e45 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 30 Sep 2022 16:17:55 +0800 Subject: [PATCH 181/214] first stab at new contrib for separate out-of-box user and group and extendedUser and BaseGroup --- pfunk/contrib/auth/collections.py | 123 ++++++++++++++++++------------ pfunk/fields.py | 1 - pfunk/project.py | 4 + pfunk/tests/test_dev.py | 116 +++++++++++++++------------- pfunk/tests/unittest_keys.py | 2 + 5 files changed, 145 insertions(+), 101 deletions(-) create mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 39cbacb..bf021ad 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -19,43 +19,10 @@ from pfunk.fields import SlugField -class UserGroups(Collection): - """ Many-to-many collection of the user-group relationship - - The native fauna-way of holding many-to-many relationship - is to only have the ID of the 2 object. Here in pfunk, we - leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities - of a user, allowing us to add easier permission handling. - Instead of manually going to roles and adding individual - collections which can be painful in long term. 
- - Attributes: - collection_name (str): - Name of the collection in Fauna - userID (str): - Fauna ref of user that is tied to the group - groupID (str): - Fauna ref of a collection that is tied with the user - permissions (str[]): - List of permissions, `['create', 'read', 'delete', 'write']` - """ - collection_name = 'users_groups' - userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) - groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" - - -class Group(Collection): +class BaseGroup(Collection): """ Group collection that the user belongs to """ name = StringField(required=True) slug = SlugField(unique=True, required=False) - users = ManyToManyField( - env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User'), - relation_name='users_groups') def __unicode__(self): return self.name # pragma: no cover @@ -85,7 +52,6 @@ class BaseUser(Collection): collection_roles = [Public, UserRole] non_public_fields = ['groups'] use_email_verification = True - group_class = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) # Views collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, ForgotPasswordChangeView] @@ -99,7 +65,8 @@ class BaseUser(Collection): email = EmailField(required=True, unique=True) verification_key = StringField(required=False, unique=True) forgot_password_key = StringField(required=False, unique=True) - account_status = EnumField(AccountStatus, required=True, default_value="INACTIVE") + account_status = EnumField( + AccountStatus, required=True, default_value="INACTIVE") def __unicode__(self): return self.username # pragma: no cover @@ -113,6 +80,7 @@ def login(cls, username, password, _token=None): """ c = cls() try: + print(f'USERNAME: {username}\nPASSWORD: {password}') return c.client(_token=_token).query( q.call("login_user", { "username": username, "password": password}) @@ -175,12 +143,14 @@ def verify_email(cls, verification_key, verify_type='signup', password=None): attached to the user """ if verify_type == 'signup': - user = cls.get_by('unique_User_verification_key', [verification_key]) + user = cls.get_by('unique_User_verification_key', + [verification_key]) user.verification_key = '' user.account_status = 'ACTIVE' user.save() elif verify_type == 'forgot' and password: - user = cls.get_by('unique_User_forgot_password_key', [verification_key]) + user = cls.get_by('unique_User_forgot_password_key', [ + verification_key]) user.forgot_password_key = '' user.save(_credentials=password) @@ -268,14 +238,17 @@ def update_password(cls, current_password, new_password, new_password_confirm, _ `Wrong current password.` """ if new_password != new_password_confirm: - raise ValidationException('new_password: Password field and password confirm field do not match.') + raise ValidationException( + 'new_password: Password field and password confirm field do not match.') c = cls() try: return c.client(_token=_token).query( - q.call("update_password", {'current_password': current_password, 'new_password': new_password}) + q.call("update_password", { + 'current_password': current_password, 'new_password': new_password}) ) except BadRequest: - raise ValidationException('current_password: Password update failed.') + raise ValidationException( + 'current_password: Password update failed.') @classmethod def 
get_current_user(cls, _token=None): @@ -294,11 +267,12 @@ def __unicode__(self): return self.username # pragma: no cover -class User(BaseUser): - user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') - group_class = import_util('pfunk.contrib.auth.collections.Group') - """ User that has permission capabilities. Extension of `BaseUser` """ - groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group'), 'users_groups') +class ExtendedUser(BaseUser): + """ User that has permission capabilities. Extension of `BaseUser`. + Subclass and define these properties + Provides base methods for group-user permissions. If there are no + supplied `groups` property, will raise `NotImplementedErrror` + """ @classmethod def get_permissions(cls, ref, _token=None): @@ -306,6 +280,8 @@ def get_permissions(cls, ref, _token=None): def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ + if not self.group_class: + raise NotImplementedError return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( q.paginate(q.match('users_groups_by_user', self.ref)) ).get('data')] @@ -361,11 +337,62 @@ def add_permissions(self, group, permissions: list, _token=None): for i in permissions: perm_list.extend(i.permissions) + if not self.user_group_class: + raise NotImplementedError + try: - user_group = self.user_group_class.get_by('users_groups_by_group_and_user', terms=[group.ref, self.ref]) + user_group = self.user_group_class.get_by( + 'users_groups_by_group_and_user', terms=[group.ref, self.ref]) except DocNotFound: - user_group = self.user_group_class.create(userID=self.ref, groupID=group.ref, permissions=perm_list) + user_group = self.user_group_class.create( + userID=self.ref, groupID=group.ref, permissions=perm_list) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() return user_group + + +class UserGroups(Collection): + """ Many-to-many collection of the user-group relationship + + The native fauna-way of holding many-to-many relationship + is to only have the ID of the 2 object. Here in pfunk, we + leverage the flexibility of the collection to have another + field, which is `permissions`, this field holds the capablities + of a user, allowing us to add easier permission handling. + Instead of manually going to roles and adding individual + collections which can be painful in long term. 
+ + Attributes: + collection_name (str): + Name of the collection in Fauna + userID (str): + Fauna ref of user that is tied to the group + groupID (str): + Fauna ref of a collection that is tied with the user + permissions (str[]): + List of permissions, `['create', 'read', 'delete', 'write']` + """ + collection_name = 'users_groups' + userID = ReferenceField( + env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) + groupID = ReferenceField( + env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + +class Group(BaseGroup): + """ A default group that already has predefined M2M relationship with `pfunk.contrib.auth.collections.User` """ + users = ManyToManyField( + 'pfunk.contrib.auth.collections.User', 'users_groups') + + +class User(ExtendedUser): + """ A default user that already has predefined M2M relationship with `pfunk.contrib.auth.collections.Group` """ + user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') + group_class = import_util('pfunk.contrib.auth.collections.Group') + groups = ManyToManyField( + 'pfunk.contrib.auth.collections.Group', 'users_groups') diff --git a/pfunk/fields.py b/pfunk/fields.py index ddb1c1a..fa5755a 100644 --- a/pfunk/fields.py +++ b/pfunk/fields.py @@ -194,7 +194,6 @@ class ManyToManyField(GraphQLMixin, ForeignListProperty): relation_field = True def __init__(self, foreign_class, relation_name, return_type=None, return_prop=None, **kwargs): - self.foreign_class = foreign_class self.relation_name = relation_name super(ManyToManyField, self).__init__(foreign_class, return_type=return_type, return_prop=return_prop, **kwargs) diff --git a/pfunk/project.py b/pfunk/project.py index 313a464..4c987fe 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -204,6 +204,10 @@ def publish(self, mode: str = 'merge') -> int: auth=BearerAuth(secret), data=gql_io ) + + print(f'\n') + print(self.render()) + print('----------------------------------------\n') if resp.status_code == 200: test_mode = env('PFUNK_TEST_MODE', False, var_type='boolean') if not test_mode: diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py index 3704d30..0ea6dd2 100644 --- a/pfunk/tests/test_dev.py +++ b/pfunk/tests/test_dev.py @@ -1,52 +1,64 @@ -# # test_dev.py - a placeholder test for fixing User - Group circular import errors -# -# import os -# from valley.utils import import_util -# -# from pfunk.contrib.auth.collections import BaseUser, User -# from pfunk.testcase import APITestCase -# from pfunk.contrib.auth.collections import Group -# from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -# from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField -# -# -# # Simple setup -# # Env var setup for user and group -# os.environ['GROUP_COLLECTION'] = 'pfunk.tests.test_dev.NewGroup' -# os.environ['USER_COLLECTION'] = 'pfunk.tests.test_dev.NewUser' -# -# class NewUser(User): -# # groups = ManyToManyField('pfunk.tests.test_dev.NewGroup') -# pass -# -# class NewGroup(Group): -# users = ManyToManyField('pfunk.tests.test_dev.NewUser', 'group_users') -# -# class Blog(Collection): -# """ Collection for DigitalOcean-Type request """ -# title = StringField(required=True) -# content = StringField(required=True) -# user = ReferenceField(NewUser) -# -# def __unicode__(self): -# return self.title -# -# # Test case to see if 
user-group is working -# class TestUserGroupError(APITestCase): -# collections = [NewUser, NewGroup, Blog] -# -# def setUp(self) -> None: -# super().setUp() -# self.group = NewGroup.create(name='Power Users', slug='power-users') -# self.user = NewUser.create(username='test', email='tlasso@example.org', first_name='Ted', -# last_name='Lasso', _credentials='abc123', account_status='ACTIVE', -# groups=[self.group]) -# self.blog = Blog.create( -# title='test_blog', content='test content', user=self.user) -# -# self.token, self.exp = NewUser.api_login("test", "abc123") -# print(f'\n\nTOKEN: {self.token}') -# print(f'\n\nEXP: {self.exp}') -# -# def test_mock(self): -# assert True \ No newline at end of file +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, Group, User +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField + + +class UserGroups(Collection): + collection_name = 'custom_users_groups' + userID = ReferenceField('pfunk.tests.test_dev.Newuser') + groupID = ReferenceField('pfunk.tests.test_dev.Newgroup') + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_dev.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + group_class = import_util('pfunk.tests.test_dev.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_dev.Newgroup', relation_name='custom_users_groups') + # blogs = ManyToManyField('pfunk.tests.test_dev.Blog', relation_name='users_blogs') + + +class Blog(Collection): + """ Collection for DigitalOcean-Type request """ + title = StringField(required=True) + content = StringField(required=True) + # users = ManyToManyField('pfunk.tests.test_dev.Newuser', relation_name='users_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [Newuser, Newgroup, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', user=self.user) + + # BUG: logging in returns wrong credentials error + print(f'TEST USER: {self.user.__dict__}') + self.token, self.exp = Newuser.api_login("test", "abc123") + print(f'\n\nTOKEN: {self.token}') + print(f'\n\nEXP: {self.exp}') + + def test_mock(self): + assert True diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py new file mode 100644 index 0000000..d13268d --- /dev/null +++ b/pfunk/tests/unittest_keys.py @@ -0,0 +1,2 @@ + +KEYS = {'06bcf013-403a-481b-b6ea-d00a83b7e464': {'signature_key': 'hYJTHn5rF3GeXARPyJyPL1vJhCF7yr-k1H9mITCH5hA=', 'payload_key': '-GF_6GYvo17Udk7AqtGIityqBXYizkENlxc_PDRODJQ=', 'kid': '06bcf013-403a-481b-b6ea-d00a83b7e464'}, '223e62b9-5686-40cb-9e5f-29eb2709d182': {'signature_key': '0pSOP2OxKberj2-kQdPSZDgDryTIWzAnIo0zU16LBQE=', 'payload_key': 
'yk3GPQQomCeZIqk1-DWuXORCQpAA8cJdDI4faq3snTo=', 'kid': '223e62b9-5686-40cb-9e5f-29eb2709d182'}, 'c4545030-eac0-4286-8499-f4d5229d4520': {'signature_key': '1vEP5nxo1bUqkEJYjYOedcDtqDO-BWCTvybia5sbszY=', 'payload_key': 'feMygEXTzYddREZaLsiwKYQQnU8dhY0pxz_xXmQ51aU=', 'kid': 'c4545030-eac0-4286-8499-f4d5229d4520'}, '74d87722-3e42-46fe-ac19-9c24a6a49659': {'signature_key': '1wJp_n09f2lhFDOZY9pwqjHyyKtAZq185hdvbhkr7bg=', 'payload_key': 'nTTCKUoC3wBKhNsAxba65UYvJ2Wow2Lhx1bs95xisIk=', 'kid': '74d87722-3e42-46fe-ac19-9c24a6a49659'}, '34875674-7ace-41f9-b04a-fd0b27f8774f': {'signature_key': 'LHBp-r_TTJXSEMeyl2g2bklk4dg0hArkN_QE2nirKts=', 'payload_key': 'I8kBnhtBZ7SWi2C2EtcZHJ48_QT2J4tWMvGlFNb27w0=', 'kid': '34875674-7ace-41f9-b04a-fd0b27f8774f'}, 'de75b5a2-f950-49bc-91e5-03fba00390ae': {'signature_key': 'BDLMhgjmLHkBZwXYuay9x3eB6_4leetdHHMfR7wHi34=', 'payload_key': 'wNUJQyAnueoHv3zYpkqCOE4eECJvW9O4gUWm5JOLakI=', 'kid': 'de75b5a2-f950-49bc-91e5-03fba00390ae'}, 'a8254567-995a-43a7-a79a-e8855c50af51': {'signature_key': 'ArxuXmhYJ41YAM8yQR1uSFyBk5Y5vABBeA103X0PFYI=', 'payload_key': 'ocrhhe1GkdzYQrEPq4ibKd6qHTiEzXXXhpQZZeOKw2k=', 'kid': 'a8254567-995a-43a7-a79a-e8855c50af51'}, 'd6190824-36b6-421b-9c48-ea9a2fd1a48a': {'signature_key': 'GS0gGJnia3bI01w0o4JbD3YMOvLXzzPqOyF4wD1lFwk=', 'payload_key': 'ZXquXtViWDhSGID7Ltufv40x3op-7T8dymOnnN3NAuE=', 'kid': 'd6190824-36b6-421b-9c48-ea9a2fd1a48a'}, '81239a5d-2ead-44ca-803a-a89fc6113b22': {'signature_key': 'MV8caU2wEw9SpudpKVXEVwiyei_2dpr3D4Va40ObsOk=', 'payload_key': 'G5dY4O038k9oKtCq1YPsYp37PZ1RBKLKKX31JXJKHL8=', 'kid': '81239a5d-2ead-44ca-803a-a89fc6113b22'}, 'b136e397-f3e9-4760-af51-270be165fe3a': {'signature_key': 'dKON8KVgtb1p4n2fe7TnXQ3-hlt85JO1uoZDZeVHt0w=', 'payload_key': 'WphxdwSWT-Oxso1MoC9zRnkUyc2zyf5kL6yVooco7ic=', 'kid': 'b136e397-f3e9-4760-af51-270be165fe3a'}} \ No newline at end of file From b0210ee381c4f3841f1105fca2dc6db1bb416fbe Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 7 Oct 2022 16:06:31 +0800 Subject: [PATCH 182/214] Refactored functions to use index that correctly corresponds to the collection's name --- pfunk/contrib/auth/collections.py | 6 ++++-- pfunk/contrib/auth/resources.py | 2 +- pfunk/tests/test_dev.py | 19 +++++++++---------- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index bf021ad..d7dbf4e 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -1,3 +1,4 @@ +from cmath import log import uuid from envs import env @@ -80,12 +81,11 @@ def login(cls, username, password, _token=None): """ c = cls() try: - print(f'USERNAME: {username}\nPASSWORD: {password}') return c.client(_token=_token).query( q.call("login_user", { "username": username, "password": password}) ) - except BadRequest: + except Exception as err: raise LoginFailed( 'The login credentials you entered are incorrect.') @@ -103,7 +103,9 @@ def permissions(self, _token=None): @classmethod def api_login(cls, username, password, _token=None): token = cls.login(username=username, password=password, _token=_token) + print(f'\n\nLOGIN: {token}\n\n') user = cls.get_current_user(_token=token) + print(f'\n\nUSER: {user}\n\n') claims = user.to_dict().copy() try: claims.get('data').pop('verification_key') diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index ab0c65a..1a7944e 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -13,7 +13,7 @@ def get_body(self): return q.query( 
q.lambda_(["input"], q.let({ - "user": q.match(q.index("unique_User_username"), q.select("username", q.var("input"))) + "user": q.match(q.index(f"unique_{self.collection.__class__.__name__}_username"), q.select("username", q.var("input"))) }, q.if_( q.equals( diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py index 0ea6dd2..b0d3c0f 100644 --- a/pfunk/tests/test_dev.py +++ b/pfunk/tests/test_dev.py @@ -3,7 +3,7 @@ import os from valley.utils import import_util -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, Group, User +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser from pfunk.testcase import APITestCase from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField @@ -15,9 +15,6 @@ class UserGroups(Collection): groupID = ReferenceField('pfunk.tests.test_dev.Newgroup') permissions = ListField() - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" - class Newgroup(BaseGroup): users = ManyToManyField('pfunk.tests.test_dev.Newuser', @@ -25,17 +22,19 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_dev.UserGroups') group_class = import_util('pfunk.tests.test_dev.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_dev.Newgroup', relation_name='custom_users_groups') - # blogs = ManyToManyField('pfunk.tests.test_dev.Blog', relation_name='users_blogs') + blogs = ManyToManyField('pfunk.tests.test_dev.Blog', + relation_name='users_blogs') class Blog(Collection): - """ Collection for DigitalOcean-Type request """ title = StringField(required=True) content = StringField(required=True) - # users = ManyToManyField('pfunk.tests.test_dev.Newuser', relation_name='users_blogs') + users = ManyToManyField('pfunk.tests.test_dev.Newuser', + relation_name='users_blogs') def __unicode__(self): return self.title @@ -43,7 +42,7 @@ def __unicode__(self): # Test case to see if user-group is working class TestUserGroupError(APITestCase): - collections = [Newuser, Newgroup, Blog] + collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: super().setUp() @@ -52,9 +51,9 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) self.blog = Blog.create( - title='test_blog', content='test content', user=self.user) + title='test_blog', content='test content', user=self.user, token=self.secret) - # BUG: logging in returns wrong credentials error + # BUG: logging in returns missing identity print(f'TEST USER: {self.user.__dict__}') self.token, self.exp = Newuser.api_login("test", "abc123") print(f'\n\nTOKEN: {self.token}') From 4f8a87b8147b79889a861d318b9f5bd8e73f3124 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 11 Oct 2022 14:57:59 +0800 Subject: [PATCH 183/214] added dynamic indexing in auth contrib collections --- pfunk/collection.py | 1 - pfunk/contrib/auth/collections.py | 19 +++++++++++++++---- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 1e2c78a..a7d4e85 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -213,7 +213,6 @@ def client(self, _token=None) -> FaunaClient: Returns: FaunaClient """ - if _token: return FaunaClient(secret=_token) return FaunaClient(secret=env('FAUNA_SECRET')) diff --git a/pfunk/contrib/auth/collections.py 
b/pfunk/contrib/auth/collections.py index d7dbf4e..254df59 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -103,9 +103,7 @@ def permissions(self, _token=None): @classmethod def api_login(cls, username, password, _token=None): token = cls.login(username=username, password=password, _token=_token) - print(f'\n\nLOGIN: {token}\n\n') user = cls.get_current_user(_token=token) - print(f'\n\nUSER: {user}\n\n') claims = user.to_dict().copy() try: claims.get('data').pop('verification_key') @@ -284,8 +282,15 @@ def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ if not self.group_class: raise NotImplementedError + user_class = self.__class__.__name__.lower() + group_class = self.group_class.__name__.lower() + relation_name = self._base_properties.get("groups").relation_name + index_name = f'{user_class}s_{group_class}s_by_{user_class}' + if relation_name: + index_name = f'{relation_name}_by_{user_class}' + return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( - q.paginate(q.match('users_groups_by_user', self.ref)) + q.paginate(q.match(index_name, self.ref)) ).get('data')] def permissions(self, _token=None): @@ -302,9 +307,15 @@ def permissions(self, _token=None): perm_list (str[]): Permissions of the user in list: `['create', 'read', 'delete', 'write']` """ + user_class = self.__class__.__name__.lower() + group_class = self.group_class.__name__.lower() + relation_name = self._base_properties.get("groups").relation_name + index_name = f'{user_class}s_{group_class}s_by_{group_class}_and_{user_class}' + if relation_name: + index_name = f'{relation_name}_by_{group_class}_and_{user_class}' perm_list = [] for i in self.get_groups(_token=_token): - ug = self.user_group_class.get_index('users_groups_by_group_and_user', [ + ug = self.user_group_class.get_index(index_name, [ i.ref, self.ref], _token=_token) for user_group in ug: p = [] From b574e7a5c8182eb7eb7eea3b17655e45d6e6ccf8 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 24 Oct 2022 17:57:01 +0800 Subject: [PATCH 184/214] made permissions in genericUserBasedRole to dynamically use the field that the user and group is using to work properly on custom user and group --- pfunk/collection.py | 29 ++++++++ pfunk/contrib/auth/resources.py | 89 ++++++++++++++++++----- pfunk/resources.py | 6 +- pfunk/tests/test_dev.py | 63 ----------------- pfunk/tests/test_user_subclass.py | 113 ++++++++++++++++++++++++++++++ pfunk/tests/unittest_keys.py | 2 - pfunk/utils/publishing.py | 4 +- 7 files changed, 221 insertions(+), 85 deletions(-) delete mode 100644 pfunk/tests/test_dev.py create mode 100644 pfunk/tests/test_user_subclass.py delete mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/collection.py b/pfunk/collection.py index a7d4e85..5d0a7d2 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -124,6 +124,35 @@ def get_fields(self) -> dict: """ return {k: q.select(k, q.var("input")) for k, v in self._base_properties.items() if k not in self.non_public_fields} + + def get_user_field(self) -> str: + """ Acquires the field where the relationship with a user was defined. + + It is required to define the `USER_COLLECTION` in env var if a custom + user will be used. 
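# Rough standalone illustration of the lookup performed below, assuming
# `USER_COLLECTION` holds the bare class name of the user collection (for
# example 'Newuser') and that each field reports a GraphQL type string such
# as 'String!' or 'Newuser'. The dict stands in for self._base_properties and
# os.environ stands in for envs.env(); names here are placeholders.
import os

graphql_types = {'title': 'String!', 'content': 'String!', 'user': 'Newuser'}
user_class = os.environ.get('USER_COLLECTION', 'User')
user_field = next(
    (name for name, gql_type in graphql_types.items() if user_class in gql_type),
    None,
)
# With USER_COLLECTION='Newuser' this resolves to 'user'; with the default
# 'User' it would not match 'Newuser', which is why the env var must be set
# when a custom user collection is used.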
This is to ensure the permissions to work properly + """ + fields = self._base_properties.items() + user_class = env('USER_COLLECTION', 'User') + user_field = None + for k, v in fields: + if user_class in v.get_graphql_type(): + user_field = k + return user_field + + def get_group_field(self) -> str: + """ Acquires the field where the relationship with a group was defined. + + It is required to define the `GROUP_COLLECTION` in env var if a custom + user will be used. This is to ensure the permissions to work properly + + """ + fields = self._base_properties.items() + group_class = env('GROUP_COLLECTION', 'Group') + group_field = None + for k, v in fields: + if group_class in v.get_graphql_type(): + group_field = k + return group_field def get_collection_name(self) -> str: """ diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 1a7944e..c315907 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -171,12 +171,35 @@ def get_lambda(self, resource_type): class GenericAuthorizationRole(Role): - user_table = 'User' - current_user_field = 'user' - name_suffix = 'user_based_crud_role' + + def get_user_collection(self): + user_field = self.collection._base_properties.get("users") + if not user_field: + user_field = self.collection._base_properties.get("user") + return user_field + + def get_user_table(self): + user_field = self.get_user_collection() + if user_field: + return user_field.get_foreign_class().__name__ + else: + return None + + def get_relation_index_name(self): + self.current_user_field = self.collection.__class__.__name__.lower() + self.user_table = self.collection.__class__.__name__ + relation_index_name = (self.get_user_collection().__base_properties.get('groups').relation_name + + '_by_' + + self.collection.group_class.__name__.lower() + + '_' + + self.get_user_table()) + return relation_index_name + + def get_name_suffix(self): + return f'{self.get_user_table().lower()}_based_crud_role' def get_name(self): - return self.name or f"{self.collection.get_class_name()}_{self.name_suffix}" + return self.name or f"{self.collection.get_class_name()}_{self.get_name_suffix()}" def get_privileges(self): priv_list = [ @@ -190,7 +213,7 @@ def get_privileges(self): } }, { - "resource": q.index(self.relation_index_name), + "resource": q.index(self.get_relation_index_name()), "actions": { "read": True } @@ -226,10 +249,23 @@ def get_privileges(self): class GenericUserBasedRole(GenericAuthorizationRole): relation_index_name = 'users_groups_by_user' + def get_relation_index_name(self): + # Acquires the `groups` field from the user collection + user_col = self.get_user_collection().get_foreign_class() + user_groups = user_col._base_properties.get("groups") + + if user_groups: + relation_index_name = (user_groups.relation_name + + '_by_' + + self.get_user_table().lower()) + return relation_index_name + return None + def get_lambda(self, resource_type): + current_user_field = self.collection.get_user_field() if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] - user_ref = q.select(self.current_user_field, + user_ref = q.select(current_user_field, q.select('data', q.var('old_object'))) return q.query( q.lambda_(lambda_args, @@ -239,7 +275,7 @@ def get_lambda(self, resource_type): q.current_identity() ), q.equals( - q.select(self.current_user_field, q.select('data', q.var('new_object'))), + q.select(current_user_field, q.select('data', q.var('new_object'))), q.current_identity() ) ) @@ -248,11 +284,11 @@ 
def get_lambda(self, resource_type): ) elif resource_type == 'create': lambda_args = ["new_object"] - user_ref = q.select(self.current_user_field, + user_ref = q.select(current_user_field, q.select('data', q.var('new_object'))) elif resource_type == 'read' or resource_type == 'delete': lambda_args = ["object_ref"] - user_ref = q.select(self.current_user_field, + user_ref = q.select(current_user_field, q.select('data', q.get(q.var('object_ref')))) return q.query( @@ -273,10 +309,31 @@ class GenericGroupBasedRole(GenericAuthorizationRole): user_table = 'User' name_suffix = 'group_based_crud_role' + def get_name_suffix(self): + """ """ + # TODO: Return `group_based_crud_role` with dynamic group name class + pass + + def get_relation_index_name(self): + user_col = self.get_user_collection().get_foreign_class() + user_groups = user_col._base_properties.get("groups") + + if user_groups: + # TODO: be able to return `_by_` .e.g. `users_groups_by_user` + relation_index_name = (user_groups.relation_name + + '_by_' + + self.collection.group_class.__name__.lower() + + '_' + + self.get_user_table().lower()) + return relation_index_name + return None + def get_lambda(self, resource_type): + current_group_field = self.collection.get_group_field() + print(f'\n\nCURRENT GROUP FIELD: {current_group_field}\n\n') perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': - group_ref = q.select(self.current_group_field, + group_ref = q.select(current_group_field, q.select('data', q.var('old_object'))) lambda_args = ["old_object", "new_object", "object_ref"] @@ -289,7 +346,7 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.get( q.match( - q.index(self.relation_index_name), + q.index(self.get_relation_index_name()), group_ref, q.current_identity() ) @@ -297,18 +354,18 @@ def get_lambda(self, resource_type): perm ), q.equals( - q.select(self.current_group_field, q.select('data', q.var('old_object'))), - q.select(self.current_group_field, q.select('data', q.var('new_object'))), + q.select(current_group_field, q.select('data', q.var('old_object'))), + q.select(current_group_field, q.select('data', q.var('new_object'))), ) ) ) ) elif resource_type == 'create': - group_ref = q.select(self.current_group_field, + group_ref = q.select(current_group_field, q.select('data', q.var('new_object'))) lambda_args = ["new_object"] elif resource_type == 'read' or resource_type == 'delete': - group_ref = q.select(self.current_group_field, + group_ref = q.select(current_group_field, q.select('data', q.get(q.var('object_ref')))) lambda_args = ["object_ref"] @@ -320,7 +377,7 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.select("data", q.get(q.match( - q.index(self.relation_index_name), + q.index(self.get_relation_index_name()), group_ref, q.current_identity() )))))), diff --git a/pfunk/resources.py b/pfunk/resources.py index c31f98e..18b4687 100644 --- a/pfunk/resources.py +++ b/pfunk/resources.py @@ -133,6 +133,10 @@ def get_membership_lambda(self): ) )) + def get_user_table(self): + """ Acquires user table from the class name """ + return None + def get_membership(self) -> dict: """ Returns the membership configuration for the role @@ -141,7 +145,7 @@ def get_membership(self) -> dict: """ membership = self.get_membership_lambda() payload_dict = { - 'resource': q.collection(self.user_table or self.collection.get_collection_name()), + 'resource': q.collection(self.get_user_table() or self.collection.get_collection_name()), } if 
membership: payload_dict['predicate'] = self.get_membership_lambda() diff --git a/pfunk/tests/test_dev.py b/pfunk/tests/test_dev.py deleted file mode 100644 index b0d3c0f..0000000 --- a/pfunk/tests/test_dev.py +++ /dev/null @@ -1,63 +0,0 @@ -# test_dev.py - a placeholder test for fixing User - Group circular import errors - -import os -from valley.utils import import_util - -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser -from pfunk.testcase import APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField - - -class UserGroups(Collection): - collection_name = 'custom_users_groups' - userID = ReferenceField('pfunk.tests.test_dev.Newuser') - groupID = ReferenceField('pfunk.tests.test_dev.Newgroup') - permissions = ListField() - - -class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_dev.Newuser', - relation_name='custom_users_groups') - - -class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_dev.UserGroups') - group_class = import_util('pfunk.tests.test_dev.Newgroup') - groups = ManyToManyField( - 'pfunk.tests.test_dev.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_dev.Blog', - relation_name='users_blogs') - - -class Blog(Collection): - title = StringField(required=True) - content = StringField(required=True) - users = ManyToManyField('pfunk.tests.test_dev.Newuser', - relation_name='users_blogs') - - def __unicode__(self): - return self.title - - -# Test case to see if user-group is working -class TestUserGroupError(APITestCase): - collections = [Newuser, Newgroup, UserGroups, Blog] - - def setUp(self) -> None: - super().setUp() - self.group = Newgroup.create(name='Power Users', slug='power-users') - self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) - self.blog = Blog.create( - title='test_blog', content='test content', user=self.user, token=self.secret) - - # BUG: logging in returns missing identity - print(f'TEST USER: {self.user.__dict__}') - self.token, self.exp = Newuser.api_login("test", "abc123") - print(f'\n\nTOKEN: {self.token}') - print(f'\n\nEXP: {self.exp}') - - def test_mock(self): - assert True diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_user_subclass.py new file mode 100644 index 0000000..43cd47f --- /dev/null +++ b/pfunk/tests/test_user_subclass.py @@ -0,0 +1,113 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole + + +class UserGroups(Collection): + collection_name = 'users_groups' + userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') + groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') + permissions = ListField() + + +class Newgroup(BaseGroup): + users = 
ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_user_subclass.UserGroups') + group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog', + relation_name='users_blogs') + + +class Blog(Collection): + collection_roles = [GenericUserBasedRole] + title = StringField(required=True) + content = StringField(required=True) + user = ReferenceField('pfunk.tests.test_user_subclass.Newuser', + relation_name='users_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', user=self.user, token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') + # p(f'@Test Setup: User Created: {self.user.__dict__}') + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog.", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) + + def test_update(self): + self.assertNotIn("the updated street somewhere", [ + house.address for house in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog.", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py deleted file mode 100644 index d13268d..0000000 --- a/pfunk/tests/unittest_keys.py +++ /dev/null @@ -1,2 +0,0 @@ - -KEYS = {'06bcf013-403a-481b-b6ea-d00a83b7e464': {'signature_key': 'hYJTHn5rF3GeXARPyJyPL1vJhCF7yr-k1H9mITCH5hA=', 'payload_key': '-GF_6GYvo17Udk7AqtGIityqBXYizkENlxc_PDRODJQ=', 'kid': '06bcf013-403a-481b-b6ea-d00a83b7e464'}, '223e62b9-5686-40cb-9e5f-29eb2709d182': {'signature_key': '0pSOP2OxKberj2-kQdPSZDgDryTIWzAnIo0zU16LBQE=', 'payload_key': 'yk3GPQQomCeZIqk1-DWuXORCQpAA8cJdDI4faq3snTo=', 'kid': 
'223e62b9-5686-40cb-9e5f-29eb2709d182'}, 'c4545030-eac0-4286-8499-f4d5229d4520': {'signature_key': '1vEP5nxo1bUqkEJYjYOedcDtqDO-BWCTvybia5sbszY=', 'payload_key': 'feMygEXTzYddREZaLsiwKYQQnU8dhY0pxz_xXmQ51aU=', 'kid': 'c4545030-eac0-4286-8499-f4d5229d4520'}, '74d87722-3e42-46fe-ac19-9c24a6a49659': {'signature_key': '1wJp_n09f2lhFDOZY9pwqjHyyKtAZq185hdvbhkr7bg=', 'payload_key': 'nTTCKUoC3wBKhNsAxba65UYvJ2Wow2Lhx1bs95xisIk=', 'kid': '74d87722-3e42-46fe-ac19-9c24a6a49659'}, '34875674-7ace-41f9-b04a-fd0b27f8774f': {'signature_key': 'LHBp-r_TTJXSEMeyl2g2bklk4dg0hArkN_QE2nirKts=', 'payload_key': 'I8kBnhtBZ7SWi2C2EtcZHJ48_QT2J4tWMvGlFNb27w0=', 'kid': '34875674-7ace-41f9-b04a-fd0b27f8774f'}, 'de75b5a2-f950-49bc-91e5-03fba00390ae': {'signature_key': 'BDLMhgjmLHkBZwXYuay9x3eB6_4leetdHHMfR7wHi34=', 'payload_key': 'wNUJQyAnueoHv3zYpkqCOE4eECJvW9O4gUWm5JOLakI=', 'kid': 'de75b5a2-f950-49bc-91e5-03fba00390ae'}, 'a8254567-995a-43a7-a79a-e8855c50af51': {'signature_key': 'ArxuXmhYJ41YAM8yQR1uSFyBk5Y5vABBeA103X0PFYI=', 'payload_key': 'ocrhhe1GkdzYQrEPq4ibKd6qHTiEzXXXhpQZZeOKw2k=', 'kid': 'a8254567-995a-43a7-a79a-e8855c50af51'}, 'd6190824-36b6-421b-9c48-ea9a2fd1a48a': {'signature_key': 'GS0gGJnia3bI01w0o4JbD3YMOvLXzzPqOyF4wD1lFwk=', 'payload_key': 'ZXquXtViWDhSGID7Ltufv40x3op-7T8dymOnnN3NAuE=', 'kid': 'd6190824-36b6-421b-9c48-ea9a2fd1a48a'}, '81239a5d-2ead-44ca-803a-a89fc6113b22': {'signature_key': 'MV8caU2wEw9SpudpKVXEVwiyei_2dpr3D4Va40ObsOk=', 'payload_key': 'G5dY4O038k9oKtCq1YPsYp37PZ1RBKLKKX31JXJKHL8=', 'kid': '81239a5d-2ead-44ca-803a-a89fc6113b22'}, 'b136e397-f3e9-4760-af51-270be165fe3a': {'signature_key': 'dKON8KVgtb1p4n2fe7TnXQ3-hlt85JO1uoZDZeVHt0w=', 'payload_key': 'WphxdwSWT-Oxso1MoC9zRnkUyc2zyf5kL6yVooco7ic=', 'kid': 'b136e397-f3e9-4760-af51-270be165fe3a'}} \ No newline at end of file diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py index 9558a0e..6936280 100644 --- a/pfunk/utils/publishing.py +++ b/pfunk/utils/publishing.py @@ -33,15 +33,13 @@ def create_or_update_role(client, payload: dict = {}): Returns: query """ - try: response = client.query( q.create_role(payload) ) except BadRequest as err: - payload_copy = payload.copy() - role_name = payload_copy.pop("name") + role_name = payload_copy.pop("name") response = client.query( q.update( From 6764e5fede5987d684f110474d763a52ef4f1b7e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 25 Oct 2022 15:39:40 +0800 Subject: [PATCH 185/214] Did refactoring for better readablity in roles. Applied with pep8 --- pfunk/contrib/auth/resources.py | 108 +++++++++++++----------- pfunk/resources.py | 17 ++-- pfunk/tests/test_user_subclass.py | 11 +-- pfunk/tests/test_user_subclass_m2m.py | 114 ++++++++++++++++++++++++++ pfunk/utils/publishing.py | 2 +- 5 files changed, 189 insertions(+), 63 deletions(-) create mode 100644 pfunk/tests/test_user_subclass_m2m.py diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index c315907..3f79774 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -34,10 +34,11 @@ def get_body(self): } ) ), - q.abort("Account is not active. Please check email for activation.") - ) - ) + q.abort( + "Account is not active. 
Please check email for activation.") ) + ) + ) ) @@ -61,9 +62,9 @@ def get_body(self): "credentials": {"password": q.select("new_password", q.var("input"))} }), q.abort("Wrong current password.") - ) - ) - ) + ) + ) + ) class CreateUser(AuthFunction): @@ -71,7 +72,8 @@ def get_body(self): data_dict = { "data": self.collection.get_fields(), "credentials": { - self.collection._credential_field: q.select(self.collection._credential_field, q.var("input")) + self.collection._credential_field: q.select( + self.collection._credential_field, q.var("input")) } } return q.query( @@ -79,7 +81,8 @@ def get_body(self): q.let( { 'result': q.create( - q.collection(self.collection.get_collection_name()), + q.collection( + self.collection.get_collection_name()), data_dict), 'input': q.var('input') }, @@ -90,20 +93,20 @@ def get_body(self): q.lambda_( 'group', q.create( - q.collection(self.collection._base_properties.get('groups').relation_name), + q.collection(self.collection._base_properties.get( + 'groups').relation_name), {'data': { 'userID': q.select('ref', q.var('result')), 'groupID': q.var('group') }} ) - ) - , + ), q.select('groups', q.var('input')) ), q.abort('Groups not defined.') ) - ) - )) + ) + )) class Public(Role): @@ -116,11 +119,11 @@ def get_function_lambda(self): q.lambda_(['data'], q.equals( q.select('account_status', q.select('data', - q.match(q.index('unique_User_username', + q.match(q.index(f'unique_{self.collection.__class__.__name__}_username', q.select('username', q.var('data')))))), "ACTIVE" - ) - )) + ) + )) def get_privileges(self): return [ @@ -173,30 +176,32 @@ def get_lambda(self, resource_type): class GenericAuthorizationRole(Role): def get_user_collection(self): - user_field = self.collection._base_properties.get("users") - if not user_field: - user_field = self.collection._base_properties.get("user") - return user_field - - def get_user_table(self): - user_field = self.get_user_collection() - if user_field: - return user_field.get_foreign_class().__name__ + """ Acquires User collection type """ + user_field = self.collection.get_user_field().lower() + col = self.collection._base_properties.get(user_field) + if col: + return col.get_foreign_class() else: return None + def get_user_table(self): + """ Acquires User's class name """ + col = self.get_user_collection() + if col: + return col.__name__ + return None + def get_relation_index_name(self): - self.current_user_field = self.collection.__class__.__name__.lower() - self.user_table = self.collection.__class__.__name__ - relation_index_name = (self.get_user_collection().__base_properties.get('groups').relation_name - + '_by_' - + self.collection.group_class.__name__.lower() - + '_' - + self.get_user_table()) + user_col = self.get_user_collection() + user_groups = user_col._base_properties.get("groups") + self.user_table = self.get_user_table().lower() + relation_index_name = (user_groups.relation_name + + '_by_' + + self.user_table) return relation_index_name def get_name_suffix(self): - return f'{self.get_user_table().lower()}_based_crud_role' + return f'{self.collection.get_user_field().lower()}_based_crud_role' def get_name(self): return self.name or f"{self.collection.get_class_name()}_{self.get_name_suffix()}" @@ -247,17 +252,20 @@ def get_privileges(self): class GenericUserBasedRole(GenericAuthorizationRole): - relation_index_name = 'users_groups_by_user' def get_relation_index_name(self): + """ Returns the user-group by user index name + + Formatted as: {user_group_relation_name}_by_{user_class} + """ # Acquires the 
`groups` field from the user collection - user_col = self.get_user_collection().get_foreign_class() + user_col = self.get_user_collection() user_groups = user_col._base_properties.get("groups") if user_groups: relation_index_name = (user_groups.relation_name - + '_by_' - + self.get_user_table().lower()) + + '_by_' + + self.get_user_table().lower()) return relation_index_name return None @@ -275,7 +283,8 @@ def get_lambda(self, resource_type): q.current_identity() ), q.equals( - q.select(current_user_field, q.select('data', q.var('new_object'))), + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), q.current_identity() ) ) @@ -317,20 +326,19 @@ def get_name_suffix(self): def get_relation_index_name(self): user_col = self.get_user_collection().get_foreign_class() user_groups = user_col._base_properties.get("groups") - + if user_groups: # TODO: be able to return `_by_` .e.g. `users_groups_by_user` relation_index_name = (user_groups.relation_name - + '_by_' - + self.collection.group_class.__name__.lower() - + '_' - + self.get_user_table().lower()) + + '_by_' + + self.collection.group_class.__name__.lower() + + '_' + + self.get_user_table().lower()) return relation_index_name return None def get_lambda(self, resource_type): current_group_field = self.collection.get_group_field() - print(f'\n\nCURRENT GROUP FIELD: {current_group_field}\n\n') perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, @@ -346,7 +354,8 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.get( q.match( - q.index(self.get_relation_index_name()), + q.index( + self.get_relation_index_name()), group_ref, q.current_identity() ) @@ -354,8 +363,10 @@ def get_lambda(self, resource_type): perm ), q.equals( - q.select(current_group_field, q.select('data', q.var('old_object'))), - q.select(current_group_field, q.select('data', q.var('new_object'))), + q.select(current_group_field, q.select( + 'data', q.var('old_object'))), + q.select(current_group_field, q.select( + 'data', q.var('new_object'))), ) ) ) @@ -377,7 +388,8 @@ def get_lambda(self, resource_type): q.select(self.permissions_field, q.select("data", q.get(q.match( - q.index(self.get_relation_index_name()), + q.index( + self.get_relation_index_name()), group_ref, q.current_identity() )))))), diff --git a/pfunk/resources.py b/pfunk/resources.py index 18b4687..74786e2 100644 --- a/pfunk/resources.py +++ b/pfunk/resources.py @@ -128,10 +128,11 @@ def get_membership_lambda(self): return q.query( q.lambda_(['object_ref'], q.equals( - q.select('account_status', q.select('data', q.get(q.var('object_ref')))), + q.select('account_status', q.select( + 'data', q.get(q.var('object_ref')))), "ACTIVE" - ) - )) + ) + )) def get_user_table(self): """ Acquires user table from the class name """ @@ -175,7 +176,8 @@ class Index(object): serialized: bool = None terms: list = None values: list = None - _accept_kwargs: list = ['name', 'source', 'unique', 'serialized', 'terms', 'values'] + _accept_kwargs: list = ['name', 'source', + 'unique', 'serialized', 'terms', 'values'] def __init__(self, **kwargs): """ @@ -248,9 +250,10 @@ def get_body(self): q.get(q.var('ref')) ), q.paginate( - q.match(q.index(self.collection.all_index_name())), + q.match( + q.index(self.collection.all_index_name())), q.select('size', q.var('input')) ) - ) - ) + ) + ) ) diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_user_subclass.py index 43cd47f..ae37c1f 100644 --- 
a/pfunk/tests/test_user_subclass.py +++ b/pfunk/tests/test_user_subclass.py @@ -37,7 +37,7 @@ class Blog(Collection): title = StringField(required=True) content = StringField(required=True) user = ReferenceField('pfunk.tests.test_user_subclass.Newuser', - relation_name='users_blogs') + relation_name='users_blogs') def __unicode__(self): return self.title @@ -56,9 +56,6 @@ def setUp(self) -> None: self.blog = Blog.create( title='test_blog', content='test content', user=self.user, token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") - # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') - # p(f'@Test Setup: User Created: {self.user.__dict__}') - def test_read(self): res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', @@ -93,9 +90,9 @@ def test_update(self): house.address for house in Blog.all()]) res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', json={ - "title": "updated blog", - "content": "I updated my blog.", - "user": self.user.ref.id()}, + "title": "updated blog", + "content": "I updated my blog.", + "user": self.user.ref.id()}, headers={ "Authorization": self.token}) diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py new file mode 100644 index 0000000..6fff9c3 --- /dev/null +++ b/pfunk/tests/test_user_subclass_m2m.py @@ -0,0 +1,114 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole + + +class UserGroups(Collection): + collection_name = 'users_groups' + userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') + groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') + permissions = ListField() + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_user_subclass.UserGroups') + group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog', + relation_name='users_blogs') + + +class Blog(Collection): + collection_roles = [GenericUserBasedRole] + title = StringField(required=True) + content = StringField(required=True) + users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + relation_name='users_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', users=[self.user], 
token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') + # p(f'@Test Setup: User Created: {self.user.__dict__}') + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("new blog", [ + # blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.status_code, 200) diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py index 6936280..f98efe4 100644 --- a/pfunk/utils/publishing.py +++ b/pfunk/utils/publishing.py @@ -39,7 +39,7 @@ def create_or_update_role(client, payload: dict = {}): ) except BadRequest as err: payload_copy = payload.copy() - role_name = payload_copy.pop("name") + role_name = payload_copy.pop("name") response = client.query( q.update( From 02843fc499884c3a6e8adf9cb1a2e908730673be Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 26 Oct 2022 15:50:44 +0800 Subject: [PATCH 186/214] Fixed GroupBasedPermission to properly use functions and dynamic sytaxes --- pfunk/collection.py | 2 ++ pfunk/contrib/auth/resources.py | 40 +++++++++++++++++++-------- pfunk/tests/test_user_subclass_m2m.py | 1 + 3 files changed, 32 insertions(+), 11 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 5d0a7d2..171a1b9 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -137,6 +137,7 @@ def get_user_field(self) -> str: for k, v in fields: if user_class in v.get_graphql_type(): user_field = k + break return user_field def get_group_field(self) -> str: @@ -152,6 +153,7 @@ def get_group_field(self) -> str: for k, v in fields: if group_class in v.get_graphql_type(): group_field = k + break return group_field def get_collection_name(self) -> str: diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 3f79774..f745cc1 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,3 +1,4 @@ +from tokenize import group from pfunk.client import q from pfunk.resources import Function, Role @@ -184,6 +185,17 @@ def get_user_collection(self): else: return None + def get_group_collection(self): + """ Acquires Group 
collection type from User's fields """ + user_col = self.get_user_collection() + col = user_col() + group_field = col.get_group_field() + user_groups = user_col._base_properties.get(group_field) + if user_groups: + return user_groups.get_foreign_class() + else: + return None + def get_user_table(self): """ Acquires User's class name """ col = self.get_user_collection() @@ -191,9 +203,17 @@ def get_user_table(self): return col.__name__ return None + def get_group_table(self): + """ Acquires group class name from the user's fields """ + group_col = self.get_group_collection() + if group_col: + return group_col.__name__ + return None + def get_relation_index_name(self): user_col = self.get_user_collection() - user_groups = user_col._base_properties.get("groups") + group_field = user_col.get_group_field() + user_groups = user_col._base_properties.get(group_field) self.user_table = self.get_user_table().lower() relation_index_name = (user_groups.relation_name + '_by_' @@ -319,26 +339,24 @@ class GenericGroupBasedRole(GenericAuthorizationRole): name_suffix = 'group_based_crud_role' def get_name_suffix(self): - """ """ - # TODO: Return `group_based_crud_role` with dynamic group name class - pass + return f'{self.get_group_table().lower()}_based_crud_role' def get_relation_index_name(self): - user_col = self.get_user_collection().get_foreign_class() + """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ + user_col = self.get_user_collection() user_groups = user_col._base_properties.get("groups") - - if user_groups: - # TODO: be able to return `_by_` .e.g. `users_groups_by_user` + group_table = self.get_group_table().lower() + if group_table: relation_index_name = (user_groups.relation_name + '_by_' - + self.collection.group_class.__name__.lower() - + '_' + + group_table + + '_and_' + self.get_user_table().lower()) return relation_index_name return None def get_lambda(self, resource_type): - current_group_field = self.collection.get_group_field() + current_group_field = self.get_group_table() perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py index 6fff9c3..8516db2 100644 --- a/pfunk/tests/test_user_subclass_m2m.py +++ b/pfunk/tests/test_user_subclass_m2m.py @@ -53,6 +53,7 @@ def setUp(self) -> None: self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) + p(f'@Test Setup: User Created: {self.user.__dict__}') self.blog = Blog.create( title='test_blog', content='test content', users=[self.user], token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") From 7d7ce57d2eb210a7c5f1edbb4cade1f5b69ad4a1 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 28 Oct 2022 13:59:15 +0800 Subject: [PATCH 187/214] fixed permission adding group based perm --- pfunk/contrib/auth/collections.py | 104 +++++++++++++----------- pfunk/contrib/auth/resources.py | 105 ++++++++++++++++++++---- pfunk/tests/test_group_subclass.py | 110 ++++++++++++++++++++++++++ pfunk/tests/test_user_subclass_m2m.py | 11 +-- 4 files changed, 265 insertions(+), 65 deletions(-) create mode 100644 pfunk/tests/test_group_subclass.py diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 254df59..3b78916 100644 --- 
a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -9,6 +9,7 @@ from pfunk import ReferenceField from pfunk.client import q from pfunk.collection import Collection, Enum +from pfunk.resources import Index from pfunk.contrib.auth.key import Key from pfunk.contrib.auth.resources import LoginUser, UpdatePassword, Public, UserRole, LogoutUser from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, \ @@ -29,6 +30,50 @@ def __unicode__(self): return self.name # pragma: no cover +class UserGroupByUserAndGroupIndex(Index): + name = 'usergroups_by_userID_and_groupID' + source = 'Usergroups' + terms = [ + {'field': ['data', 'userID']}, + {'field': ['data', 'groupID']} + ] + values = [ + {'field': ['ref']} + ] + + +class UserGroups(Collection): + """ Many-to-many collection of the user-group relationship + + The native fauna-way of holding many-to-many relationship + is to only have the ID of the 2 object. Here in pfunk, we + leverage the flexibility of the collection to have another + field, which is `permissions`, this field holds the capablities + of a user, allowing us to add easier permission handling. + Instead of manually going to roles and adding individual + collections which can be painful in long term. + + Attributes: + collection_name (str): + Name of the collection in Fauna + userID (str): + Fauna ref of user that is tied to the group + groupID (str): + Fauna ref of a collection that is tied with the user + permissions (str[]): + List of permissions, `['create', 'read', 'delete', 'write']` + """ + collection_indexes = [UserGroupByUserAndGroupIndex] + userID = ReferenceField( + env('USER_COLLECTION_DIR', 'pfunk.contrib.auth.collections.User')) + groupID = ReferenceField( + env('GROUP_COLLECTION_DIR', 'pfunk.contrib.auth.collections.Group')) + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) @@ -273,6 +318,7 @@ class ExtendedUser(BaseUser): Provides base methods for group-user permissions. 
If there are no supplied `groups` property, will raise `NotImplementedErrror` """ + user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') @classmethod def get_permissions(cls, ref, _token=None): @@ -290,7 +336,7 @@ def get_groups(self, _token=None): index_name = f'{relation_name}_by_{user_class}' return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( - q.paginate(q.match(index_name, self.ref)) + q.paginate(q.match(index_name, self.ref)) ).get('data')] def permissions(self, _token=None): @@ -307,21 +353,18 @@ def permissions(self, _token=None): perm_list (str[]): Permissions of the user in list: `['create', 'read', 'delete', 'write']` """ - user_class = self.__class__.__name__.lower() - group_class = self.group_class.__name__.lower() - relation_name = self._base_properties.get("groups").relation_name - index_name = f'{user_class}s_{group_class}s_by_{group_class}_and_{user_class}' - if relation_name: - index_name = f'{relation_name}_by_{group_class}_and_{user_class}' + + index_name = 'usergroups_by_userID_and_groupID' perm_list = [] for i in self.get_groups(_token=_token): ug = self.user_group_class.get_index(index_name, [ - i.ref, self.ref], _token=_token) + self.ref, i.ref], _token=_token) for user_group in ug: + print(f'\n\n@contrib auth: USER GROUP: {user_group}\n\n') p = [] if isinstance(user_group.permissions, list): p = [ - f'{user_group.groupID.slug}-{i}' for i in user_group.permissions] + f'{user_group.groupID}-{i}' for i in user_group.permissions] perm_list.extend(p) return perm_list @@ -347,54 +390,24 @@ def add_permissions(self, group, permissions: list, _token=None): of the user """ perm_list = [] - for i in permissions: - perm_list.extend(i.permissions) + index_name = 'usergroups_by_userID_and_groupID' + for i in permissions: + perm_list.append(i) if not self.user_group_class: raise NotImplementedError try: user_group = self.user_group_class.get_by( - 'users_groups_by_group_and_user', terms=[group.ref, self.ref]) + index_name, terms=[self.ref, group.ref]) except DocNotFound: user_group = self.user_group_class.create( - userID=self.ref, groupID=group.ref, permissions=perm_list) + userID=self, groupID=group, permissions=perm_list, _token=_token) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() - return user_group - - -class UserGroups(Collection): - """ Many-to-many collection of the user-group relationship - The native fauna-way of holding many-to-many relationship - is to only have the ID of the 2 object. Here in pfunk, we - leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities - of a user, allowing us to add easier permission handling. - Instead of manually going to roles and adding individual - collections which can be painful in long term. 
- - Attributes: - collection_name (str): - Name of the collection in Fauna - userID (str): - Fauna ref of user that is tied to the group - groupID (str): - Fauna ref of a collection that is tied with the user - permissions (str[]): - List of permissions, `['create', 'read', 'delete', 'write']` - """ - collection_name = 'users_groups' - userID = ReferenceField( - env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) - groupID = ReferenceField( - env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" + return user_group class Group(BaseGroup): @@ -405,7 +418,6 @@ class Group(BaseGroup): class User(ExtendedUser): """ A default user that already has predefined M2M relationship with `pfunk.contrib.auth.collections.Group` """ - user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') group_class = import_util('pfunk.contrib.auth.collections.Group') groups = ManyToManyField( 'pfunk.contrib.auth.collections.Group', 'users_groups') diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index f745cc1..1cd16ad 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,7 +1,12 @@ from tokenize import group +from envs import env + from pfunk.client import q from pfunk.resources import Function, Role +# Global collections +USER_CLASS = env('USER_COLLECTION', 'User') +GROUP_CLASS = env('GROUP_COLLECTION', 'Group') class AuthFunction(Function): @@ -178,7 +183,9 @@ class GenericAuthorizationRole(Role): def get_user_collection(self): """ Acquires User collection type """ - user_field = self.collection.get_user_field().lower() + user_field = self.collection.get_user_field() + if user_field: + user_field = user_field.lower() col = self.collection._base_properties.get(user_field) if col: return col.get_foreign_class() @@ -272,6 +279,7 @@ def get_privileges(self): class GenericUserBasedRole(GenericAuthorizationRole): + """ Generic set of permissions for entity to user relationship """ def get_relation_index_name(self): """ Returns the user-group by user index name @@ -331,32 +339,37 @@ def get_lambda(self, resource_type): class GenericGroupBasedRole(GenericAuthorizationRole): - relation_index_name = 'users_groups_by_group_and_user' - through_user_field = 'userID' - current_group_field = 'group' permissions_field = 'permissions' - user_table = 'User' - name_suffix = 'group_based_crud_role' + user_table = USER_CLASS + group_table = GROUP_CLASS + through_user_field = USER_CLASS.lower() + 'ID' def get_name_suffix(self): - return f'{self.get_group_table().lower()}_based_crud_role' + return f'{self.group_table.lower()}_based_crud_role' def get_relation_index_name(self): """ Returns the index name of the m2m index of group and user e.g. 
'users_groups_by_group_and_user' """ - user_col = self.get_user_collection() - user_groups = user_col._base_properties.get("groups") - group_table = self.get_group_table().lower() - if group_table: + group_field = self.collection.get_group_field() + group_col = self.collection._base_properties.get(group_field).get_foreign_class() + group_user_field = group_col().get_user_field() + user_groups = group_col._base_properties.get(group_user_field) + if self.group_table: relation_index_name = (user_groups.relation_name + '_by_' - + group_table + + self.group_table.lower() + '_and_' - + self.get_user_table().lower()) + + self.user_table.lower()) return relation_index_name return None def get_lambda(self, resource_type): - current_group_field = self.get_group_table() + """ Returns the lambda function for giving the permission to Group-based entities + + Allows modification if: + 1. You belong to the group that owns the document + 2. You have the create permission to perform the action (create, read, write, and delete) + """ + current_group_field = self.collection.get_group_field().lower() perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, @@ -415,3 +428,67 @@ def get_lambda(self, resource_type): ) ) ) + + +# class GenericUserBasedRoleM2M(GenericAuthorizationRole): +# """ Generic set of permissions for many-to-many entity to user relationship """ + +# def get_name_suffix(self): +# # TODO: return suffix: +# return f'{self.get_group_table().lower()}_based_crud_role' + +# def get_relation_index_name(self): +# # TODO: return index name: `users_blogs_by_blog_and_newuser` +# """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ +# user_col = self.get_user_collection() +# user_groups = user_col._base_properties.get("groups") +# group_table = self.get_group_table().lower() +# if group_table: +# relation_index_name = (user_groups.relation_name +# + '_by_' +# + group_table +# + '_and_' +# + self.get_user_table().lower()) +# return relation_index_name +# return None + +# def get_lambda(self, resource_type): +# # TODO: refactor to look for the M2M index and see if the user has permission for the entity +# current_user_field = self.collection.get_user_field() +# if resource_type == 'write': +# lambda_args = ["old_object", "new_object", "object_ref"] +# user_ref = q.select(current_user_field, +# q.select('data', q.var('old_object'))) +# return q.query( +# q.lambda_(lambda_args, +# q.and_( +# q.equals( +# user_ref, +# q.current_identity() +# ), +# q.equals( +# q.select(current_user_field, q.select( +# 'data', q.var('new_object'))), +# q.current_identity() +# ) +# ) + +# ) +# ) +# elif resource_type == 'create': +# lambda_args = ["new_object"] +# user_ref = q.select(current_user_field, +# q.select('data', q.var('new_object'))) +# elif resource_type == 'read' or resource_type == 'delete': +# lambda_args = ["object_ref"] +# user_ref = q.select(current_user_field, +# q.select('data', q.get(q.var('object_ref')))) + +# return q.query( +# q.lambda_(lambda_args, +# q.equals( +# user_ref, +# q.current_identity() +# ) +# ) +# ) diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_group_subclass.py new file mode 100644 index 0000000..45a5394 --- /dev/null +++ b/pfunk/tests/test_group_subclass.py @@ -0,0 +1,110 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import 
pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_group_subclass.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + group_class = import_util('pfunk.tests.test_group_subclass.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_group_subclass.Newgroup', relation_name='custom_users_groups') + + +class Blog(Collection): + collection_roles = [GenericGroupBasedRole] + title = StringField(required=True) + content = StringField(required=True) + group = ReferenceField('pfunk.tests.test_group_subclass.Newgroup', + relation_name='newgroup_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestUserGroupError(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + print(f'\n\nALL INDEXES: {self.project.indexes}\n\n') + perms = self.user.add_permissions(self.group, ['create', 'read', 'write', 'delete']) + + p(f'\n\nest setup: Added User permissions: {perms}\n\n') + p(f'@test setup: User permissions: {self.user.permissions()}') + p(f'@Test Setup: User Created: {self.user.__dict__}') + self.blog = Blog.create( + title='test_blog', content='test content', group=self.group, token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') + # p(f'@Test Setup: User Created: {self.user.__dict__}') + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("new blog", [ + # blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = 
self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.status_code, 200) diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py index 8516db2..a3d4972 100644 --- a/pfunk/tests/test_user_subclass_m2m.py +++ b/pfunk/tests/test_user_subclass_m2m.py @@ -21,6 +21,9 @@ class UserGroups(Collection): class Newgroup(BaseGroup): users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', relation_name='custom_users_groups') + blogs = ReferenceField('pfunk.tests.test_user_subclass.Blog', + relation_name='newgroup_blogs') + class Newuser(ExtendedUser): @@ -28,16 +31,14 @@ class Newuser(ExtendedUser): group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog', - relation_name='users_blogs') class Blog(Collection): collection_roles = [GenericUserBasedRole] title = StringField(required=True) content = StringField(required=True) - users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', - relation_name='users_blogs') + group = ReferenceField('pfunk.tests.test_user_subclass.Newgroup', + relation_name='newgroup_blogs') def __unicode__(self): return self.title @@ -55,7 +56,7 @@ def setUp(self) -> None: groups=[self.group]) p(f'@Test Setup: User Created: {self.user.__dict__}') self.blog = Blog.create( - title='test_blog', content='test content', users=[self.user], token=self.secret) + title='test_blog', content='test content', group=[self.group], token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') # p(f'@Test Setup: User Created: {self.user.__dict__}') From dd4a25a6873b594750425ea1d51035bb6e6ceb5d Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 2 Nov 2022 15:11:31 +0800 Subject: [PATCH 188/214] Added priority publish for user-group-usergroup collections to avoid undefined index and permission publish --- pfunk/project.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/pfunk/project.py b/pfunk/project.py index 4c987fe..528caee 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -17,6 +17,7 @@ from pfunk.web.request import HTTPRequest, RESTRequest, WSGIRequest from pfunk.web.response import HttpNotFoundResponse, JSONMethodNotAllowedResponse +from .contrib.auth.collections import User, Group, UserGroups, BaseGroup, BaseUser, ExtendedUser from .collection import Collection from .fields import ForeignList from .template import graphql_template @@ -217,7 +218,19 @@ def publish(self, mode: str = 'merge') -> int: print('----------------------------------------') print(resp.content) return - for col in set(self.collections): + + collections = set(self.collections) + # make publishing prioritize User, Group and UserGroups + for col in collections.copy(): + if (issubclass(col, User) + or issubclass(col, Group) + or issubclass(col, BaseGroup) + or issubclass(col, ExtendedUser) + or issubclass(col, BaseUser) + or issubclass(col, UserGroups)): + col.publish() + collections.remove(col) + for col in collections: col.publish() return resp.status_code From 92a65637b268d3df4c481c6eaf1a61ff675afadc Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 7 Nov 2022 10:24:20 +0800 Subject: [PATCH 189/214] first stab at generic group 
based refactors --- pfunk/contrib/auth/collections.py | 5 ++-- pfunk/contrib/auth/resources.py | 42 ++++++++++-------------------- pfunk/tests/test_group_subclass.py | 10 +++---- 3 files changed, 21 insertions(+), 36 deletions(-) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 3b78916..ba93f10 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -360,11 +360,10 @@ def permissions(self, _token=None): ug = self.user_group_class.get_index(index_name, [ self.ref, i.ref], _token=_token) for user_group in ug: - print(f'\n\n@contrib auth: USER GROUP: {user_group}\n\n') p = [] if isinstance(user_group.permissions, list): p = [ - f'{user_group.groupID}-{i}' for i in user_group.permissions] + f'{user_group.groupID.slug}-{i}' for i in user_group.permissions] perm_list.extend(p) return perm_list @@ -402,7 +401,7 @@ def add_permissions(self, group, permissions: list, _token=None): index_name, terms=[self.ref, group.ref]) except DocNotFound: user_group = self.user_group_class.create( - userID=self, groupID=group, permissions=perm_list, _token=_token) + userID=self.ref, groupID=group.ref, permissions=perm_list, _token=_token) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 1cd16ad..6240904 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -218,14 +218,8 @@ def get_group_table(self): return None def get_relation_index_name(self): - user_col = self.get_user_collection() - group_field = user_col.get_group_field() - user_groups = user_col._base_properties.get(group_field) - self.user_table = self.get_user_table().lower() - relation_index_name = (user_groups.relation_name - + '_by_' - + self.user_table) - return relation_index_name + """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ + return 'usergroups_by_userID_and_groupID' def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' @@ -347,21 +341,6 @@ class GenericGroupBasedRole(GenericAuthorizationRole): def get_name_suffix(self): return f'{self.group_table.lower()}_based_crud_role' - def get_relation_index_name(self): - """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ - group_field = self.collection.get_group_field() - group_col = self.collection._base_properties.get(group_field).get_foreign_class() - group_user_field = group_col().get_user_field() - user_groups = group_col._base_properties.get(group_user_field) - if self.group_table: - relation_index_name = (user_groups.relation_name - + '_by_' - + self.group_table.lower() - + '_and_' - + self.user_table.lower()) - return relation_index_name - return None - def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -370,7 +349,10 @@ def get_lambda(self, resource_type): 2. You have the create permission to perform the action (create, read, write, and delete) """ current_group_field = self.collection.get_group_field().lower() + # group_slug = self.collection. 
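        # Editor's note (illustrative, not part of the patch): for a `Blog`
        # collection and resource_type='read', the line below builds
        # perm == 'blog-read', while add_permissions() in this series stores
        # bare action strings such as 'read' on UserGroups, so the filter in
        # the lambda never matches. That is the mismatch the TODO below flags,
        # and a later commit compares only the bare action name instead.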
+ # TODO: perm won't match with the entity that is being queried perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() + if resource_type == 'write': group_ref = q.select(current_group_field, q.select('data', q.var('old_object'))) @@ -383,14 +365,15 @@ def get_lambda(self, resource_type): # User ID from index q.select(0, q.filter_(lambda i: q.equals(perm, i), q.select(self.permissions_field, + q.select("data", q.get( q.match( q.index( self.get_relation_index_name()), - group_ref, - q.current_identity() + q.current_identity(), + group_ref ) - )))), + ))))), perm ), q.equals( @@ -415,14 +398,17 @@ def get_lambda(self, resource_type): q.lambda_( lambda_args, q.equals( + # NOTE: After acquiring the instance of `UserGroup`, filter the result: permission field + # that matches the `perm` variable AND then see if that is equals to `perm` var + # IMPORTANT: by using this, it will easily filter permissions available, and if there were none, then it is automatically false q.select(0, q.filter_(lambda i: q.equals(perm, i), q.select(self.permissions_field, q.select("data", q.get(q.match( q.index( self.get_relation_index_name()), - group_ref, - q.current_identity() + q.current_identity(), + group_ref )))))), perm ) diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_group_subclass.py index 45a5394..a0792cd 100644 --- a/pfunk/tests/test_group_subclass.py +++ b/pfunk/tests/test_group_subclass.py @@ -27,7 +27,7 @@ class Blog(Collection): title = StringField(required=True) content = StringField(required=True) group = ReferenceField('pfunk.tests.test_group_subclass.Newgroup', - relation_name='newgroup_blogs') + relation_name='newgroup_blogs') def __unicode__(self): return self.title @@ -44,18 +44,18 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) print(f'\n\nALL INDEXES: {self.project.indexes}\n\n') - perms = self.user.add_permissions(self.group, ['create', 'read', 'write', 'delete']) - + perms = self.user.add_permissions( + self.group, ['create', 'read', 'write', 'delete']) + p(f'\n\nest setup: Added User permissions: {perms}\n\n') p(f'@test setup: User permissions: {self.user.permissions()}') p(f'@Test Setup: User Created: {self.user.__dict__}') self.blog = Blog.create( - title='test_blog', content='test content', group=self.group, token=self.secret) + title='test_blog', content='test content', group=self.group) self.token, self.exp = Newuser.api_login("test", "abc123") # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') # p(f'@Test Setup: User Created: {self.user.__dict__}') - def test_read(self): res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', headers={ From a9d5dbbcd9309d1a503b1bf81094b5dd99c617d5 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 22 Nov 2022 16:27:19 +0800 Subject: [PATCH 190/214] Fixed GenericGroupBasedPerms not properly acquiring user table in membership role --- pfunk/contrib/auth/collections.py | 3 +- pfunk/contrib/auth/resources.py | 12 +++--- pfunk/tests/test_group_subclass.py | 59 +++++++++++++----------------- pfunk/tests/test_user_subclass.py | 9 +---- 4 files changed, 35 insertions(+), 48 deletions(-) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index ba93f10..2d64d5f 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -328,9 +328,10 @@ def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ if not self.group_class: 
raise NotImplementedError + group_class_field = self.get_group_field() user_class = self.__class__.__name__.lower() group_class = self.group_class.__name__.lower() - relation_name = self._base_properties.get("groups").relation_name + relation_name = self._base_properties.get(group_class_field).relation_name index_name = f'{user_class}s_{group_class}s_by_{user_class}' if relation_name: index_name = f'{relation_name}_by_{user_class}' diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 6240904..3888371 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -8,6 +8,7 @@ USER_CLASS = env('USER_COLLECTION', 'User') GROUP_CLASS = env('GROUP_COLLECTION', 'Group') + class AuthFunction(Function): def get_role(self): @@ -340,6 +341,9 @@ class GenericGroupBasedRole(GenericAuthorizationRole): def get_name_suffix(self): return f'{self.group_table.lower()}_based_crud_role' + + def get_user_table(self): + return USER_CLASS def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -349,9 +353,7 @@ def get_lambda(self, resource_type): 2. You have the create permission to perform the action (create, read, write, and delete) """ current_group_field = self.collection.get_group_field().lower() - # group_slug = self.collection. - # TODO: perm won't match with the entity that is being queried - perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() + perm = f'{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, @@ -373,7 +375,7 @@ def get_lambda(self, resource_type): q.current_identity(), group_ref ) - ))))), + ))))), perm ), q.equals( @@ -420,7 +422,7 @@ def get_lambda(self, resource_type): # """ Generic set of permissions for many-to-many entity to user relationship """ # def get_name_suffix(self): -# # TODO: return suffix: +# # TODO: return suffix: # return f'{self.get_group_table().lower()}_based_crud_role' # def get_relation_index_name(self): diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_group_subclass.py index a0792cd..d179861 100644 --- a/pfunk/tests/test_group_subclass.py +++ b/pfunk/tests/test_group_subclass.py @@ -34,35 +34,28 @@ def __unicode__(self): # Test case to see if user-group is working -class TestUserGroupError(APITestCase): +class TestCustomGroupBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') - self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + self.user = Newuser.create(username='test_user', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) - print(f'\n\nALL INDEXES: {self.project.indexes}\n\n') perms = self.user.add_permissions( self.group, ['create', 'read', 'write', 'delete']) - - p(f'\n\nest setup: Added User permissions: {perms}\n\n') - p(f'@test setup: User permissions: {self.user.permissions()}') - p(f'@Test Setup: User Created: {self.user.__dict__}') + self.token, self.exp = Newuser.api_login("test_user", "abc123") + self.raw_token = Newuser.login("test_user", "abc123") self.blog = Blog.create( title='test_blog', content='test content', group=self.group) - self.token, self.exp = Newuser.api_login("test", "abc123") - # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') - # p(f'@Test Setup: User Created: 
{self.user.__dict__}') - - def test_read(self): - res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') - self.assertTrue(res.status_code, 200) - self.assertEqual("test_blog", res.json['data']['data']['title']) + + # def test_read(self): + # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + # self.assertEqual("test_blog", res.json['data']['data']['title']) # def test_read_all(self): # res = self.c.get(f'/json/blog/list/', @@ -70,20 +63,19 @@ def test_read(self): # "Authorization": self.token}) # self.assertTrue(res.status_code, 200) - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "user": self.user.ref.id()}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog."}, + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) # def test_update(self): # self.assertNotIn("the updated street somewhere", [ @@ -91,8 +83,7 @@ def test_read(self): # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', # json={ # "title": "updated blog", - # "content": "I updated my blog.", - # "user": self.user.ref.id()}, + # "content": "I updated my blog."}, # headers={ # "Authorization": self.token}) diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_user_subclass.py index ae37c1f..46eb5ad 100644 --- a/pfunk/tests/test_user_subclass.py +++ b/pfunk/tests/test_user_subclass.py @@ -4,20 +4,13 @@ from valley.utils import import_util from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups from pfunk.testcase import APITestCase from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole -class UserGroups(Collection): - collection_name = 'users_groups' - userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') - groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') - permissions = ListField() - - class Newgroup(BaseGroup): users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', relation_name='custom_users_groups') From b97ac7069613a6849e86cd3cb97cfbf381708aff Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 24 Nov 2022 16:50:44 +0800 Subject: [PATCH 191/214] refactored auth perms for cleaner usage. Refactored unittests for clearer name. --- pfunk/contrib/auth/resources.py | 55 ++++-------- ... => test_custom_user_group_group_perms.py} | 84 ++++++++++--------- ... 
=> test_custom_user_group_users_perms.py} | 18 ++-- 3 files changed, 70 insertions(+), 87 deletions(-) rename pfunk/tests/{test_group_subclass.py => test_custom_user_group_group_perms.py} (51%) rename pfunk/tests/{test_user_subclass.py => test_custom_user_group_users_perms.py} (81%) diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 3888371..7cf9252 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -182,45 +182,15 @@ def get_lambda(self, resource_type): class GenericAuthorizationRole(Role): - def get_user_collection(self): - """ Acquires User collection type """ - user_field = self.collection.get_user_field() - if user_field: - user_field = user_field.lower() - col = self.collection._base_properties.get(user_field) - if col: - return col.get_foreign_class() - else: - return None - - def get_group_collection(self): - """ Acquires Group collection type from User's fields """ - user_col = self.get_user_collection() - col = user_col() - group_field = col.get_group_field() - user_groups = user_col._base_properties.get(group_field) - if user_groups: - return user_groups.get_foreign_class() - else: - return None + def get_relation_index_name(self): + """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ + return 'usergroups_by_userID_and_groupID' def get_user_table(self): - """ Acquires User's class name """ - col = self.get_user_collection() - if col: - return col.__name__ - return None + return USER_CLASS def get_group_table(self): - """ Acquires group class name from the user's fields """ - group_col = self.get_group_collection() - if group_col: - return group_col.__name__ - return None - - def get_relation_index_name(self): - """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ - return 'usergroups_by_userID_and_groupID' + return GROUP_CLASS def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' @@ -282,7 +252,13 @@ def get_relation_index_name(self): Formatted as: {user_group_relation_name}_by_{user_class} """ # Acquires the `groups` field from the user collection - user_col = self.get_user_collection() + user_field = self.collection.get_user_field() + if user_field: + user_field = user_field.lower() + else: + return None + user_col = self.collection._base_properties.get(user_field) + user_col = user_col.get_foreign_class() user_groups = user_col._base_properties.get("groups") if user_groups: @@ -342,9 +318,6 @@ class GenericGroupBasedRole(GenericAuthorizationRole): def get_name_suffix(self): return f'{self.group_table.lower()}_based_crud_role' - def get_user_table(self): - return USER_CLASS - def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -388,13 +361,13 @@ def get_lambda(self, resource_type): ) ) elif resource_type == 'create': + lambda_args = ["new_object"] group_ref = q.select(current_group_field, q.select('data', q.var('new_object'))) - lambda_args = ["new_object"] elif resource_type == 'read' or resource_type == 'delete': + lambda_args = ["object_ref"] group_ref = q.select(current_group_field, q.select('data', q.get(q.var('object_ref')))) - lambda_args = ["object_ref"] return q.query( q.lambda_( diff --git a/pfunk/tests/test_group_subclass.py b/pfunk/tests/test_custom_user_group_group_perms.py similarity index 51% rename from pfunk/tests/test_group_subclass.py rename to 
pfunk/tests/test_custom_user_group_group_perms.py index d179861..2f36603 100644 --- a/pfunk/tests/test_group_subclass.py +++ b/pfunk/tests/test_custom_user_group_group_perms.py @@ -12,21 +12,21 @@ class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_group_subclass.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_group_perms.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): - group_class = import_util('pfunk.tests.test_group_subclass.Newgroup') + group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_group_subclass.Newgroup', relation_name='custom_users_groups') + 'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') class Blog(Collection): collection_roles = [GenericGroupBasedRole] title = StringField(required=True) content = StringField(required=True) - group = ReferenceField('pfunk.tests.test_group_subclass.Newgroup', + group = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='newgroup_blogs') def __unicode__(self): @@ -38,6 +38,10 @@ class TestCustomGroupBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: + os.environ['USER_COLLECTION'] = 'Newuser' + os.environ['GROUP_COLLECTION'] = 'Newgroup' + os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test_user', email='tlasso@example.org', first_name='Ted', @@ -50,18 +54,18 @@ def setUp(self) -> None: self.blog = Blog.create( title='test_blog', content='test content', group=self.group) - # def test_read(self): - # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - # self.assertEqual("test_blog", res.json['data']['data']['title']) + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) def test_create(self): self.assertNotIn("new blog", [ @@ -69,33 +73,35 @@ def test_create(self): res = self.c.post('/json/blog/create/', json={ "title": "new blog", - "content": "I created a new blog."}, + "content": "I created a new blog.", + "group": self.group.ref.id()}, headers={ "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) self.assertIn("new blog", [ blog.title for blog in Blog.all()]) - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": "updated blog", - # "content": "I updated my blog."}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ - # 
blog.title for blog in Blog.all()]) - - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) - - # self.assertTrue(res.status_code, 200) + def test_update(self): + self.assertNotIn("the updated street somewhere", [ + house.address for house in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog."}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_user_subclass.py b/pfunk/tests/test_custom_user_group_users_perms.py similarity index 81% rename from pfunk/tests/test_user_subclass.py rename to pfunk/tests/test_custom_user_group_users_perms.py index 46eb5ad..45495ec 100644 --- a/pfunk/tests/test_user_subclass.py +++ b/pfunk/tests/test_custom_user_group_users_perms.py @@ -12,16 +12,16 @@ class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_user_subclass.UserGroups') - group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') + user_group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.UserGroups') + group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_user_subclass.Blog', + 'pfunk.tests.test_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Blog', relation_name='users_blogs') @@ -29,7 +29,7 @@ class Blog(Collection): collection_roles = [GenericUserBasedRole] title = StringField(required=True) content = StringField(required=True) - user = ReferenceField('pfunk.tests.test_user_subclass.Newuser', + user = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -37,10 +37,14 @@ def __unicode__(self): # Test case to see if user-group is working -class TestUserGroupError(APITestCase): +class TestCustomUserBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: + os.environ['USER_COLLECTION'] = 'Newuser' + os.environ['GROUP_COLLECTION'] = 'Newgroup' + os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', From 2f1cd4d8a59e1c67303fd7d4226cd11a18189d90 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 1 Dec 2022 18:32:55 +0800 Subject: [PATCH 192/214] Added genericM2MUser 
perms --- pfunk/contrib/auth/resources.py | 142 ++++++++++++++++++-------------- pfunk/tests/test_m2m.py | 109 ++++++++++++++++++++++++ 2 files changed, 189 insertions(+), 62 deletions(-) create mode 100644 pfunk/tests/test_m2m.py diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 7cf9252..00848ea 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -391,65 +391,83 @@ def get_lambda(self, resource_type): ) -# class GenericUserBasedRoleM2M(GenericAuthorizationRole): -# """ Generic set of permissions for many-to-many entity to user relationship """ - -# def get_name_suffix(self): -# # TODO: return suffix: -# return f'{self.get_group_table().lower()}_based_crud_role' - -# def get_relation_index_name(self): -# # TODO: return index name: `users_blogs_by_blog_and_newuser` -# """ Returns the index name of the m2m index of group and user e.g. 'users_groups_by_group_and_user' """ -# user_col = self.get_user_collection() -# user_groups = user_col._base_properties.get("groups") -# group_table = self.get_group_table().lower() -# if group_table: -# relation_index_name = (user_groups.relation_name -# + '_by_' -# + group_table -# + '_and_' -# + self.get_user_table().lower()) -# return relation_index_name -# return None - -# def get_lambda(self, resource_type): -# # TODO: refactor to look for the M2M index and see if the user has permission for the entity -# current_user_field = self.collection.get_user_field() -# if resource_type == 'write': -# lambda_args = ["old_object", "new_object", "object_ref"] -# user_ref = q.select(current_user_field, -# q.select('data', q.var('old_object'))) -# return q.query( -# q.lambda_(lambda_args, -# q.and_( -# q.equals( -# user_ref, -# q.current_identity() -# ), -# q.equals( -# q.select(current_user_field, q.select( -# 'data', q.var('new_object'))), -# q.current_identity() -# ) -# ) - -# ) -# ) -# elif resource_type == 'create': -# lambda_args = ["new_object"] -# user_ref = q.select(current_user_field, -# q.select('data', q.var('new_object'))) -# elif resource_type == 'read' or resource_type == 'delete': -# lambda_args = ["object_ref"] -# user_ref = q.select(current_user_field, -# q.select('data', q.get(q.var('object_ref')))) - -# return q.query( -# q.lambda_(lambda_args, -# q.equals( -# user_ref, -# q.current_identity() -# ) -# ) -# ) +class GenericUserBasedRoleM2M(GenericAuthorizationRole): + """ Generic set of permissions for many-to-many entity to user relationship """ + + def get_name_suffix(self): + return f'{self.collection.get_user_field().lower()}_based_crud_role' + + def get_relation_index_name(self): + """ Returns the index name of the m2m index of group and user e.g. 
'users_blogs_by_blog_and_newuser' """ + user_field = self.collection.get_user_field() + if user_field: + user_field = user_field.lower() + else: + return None + user_col = self.collection._base_properties.get(user_field) + user_col_relation = user_col.relation_name + + group_table = self.get_group_table().lower() + if group_table: + relation_index_name = (user_col_relation + + '_by_' + + self.collection.get_collection_name().lower() + + '_and_' + + self.get_user_table().lower()) + return relation_index_name + return None + + + def get_lambda(self, resource_type): + current_user_field = self.collection.get_user_field() + if resource_type == 'write': + lambda_args = ["old_object", "new_object", "object_ref"] + obj_ref = q.var('old_object') + # BUG: Returning error 'NoneType' object has no attribute 'relation_field' + return q.query( + q.lambda_(lambda_args, + q.and_( + q.equals( + q.select(f'{USER_CLASS.lower()}ID', + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), + q.current_identity() + ), + q.equals( + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), + q.current_identity() + ) + ) + ) + ) + elif resource_type == 'create': + lambda_args = ["new_object"] + obj_ref = q.var('new_object') + elif resource_type == 'read' or resource_type == 'delete': + lambda_args = ["object_ref"] + obj_ref = q.var('object_ref') + + return q.query( + q.lambda_( + lambda_args, + q.equals( + q.select(f'{USER_CLASS.lower()}ID', + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), + q.current_identity() + ) + ) + ) diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_m2m.py new file mode 100644 index 0000000..a200517 --- /dev/null +++ b/pfunk/tests/test_m2m.py @@ -0,0 +1,109 @@ +# test_dev.py - a placeholder test for fixing User - Group circular import errors + +import os +from valley.utils import import_util +from pprint import pprint as p + +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.testcase import APITestCase +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, GenericUserBasedRoleM2M + + +class Newgroup(BaseGroup): + users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_m2m.UserGroups') + group_class = import_util('pfunk.tests.test_m2m.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_m2m.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_m2m.Blog', + relation_name='users_blogs') + + +class Blog(Collection): + collection_roles = [GenericUserBasedRoleM2M] + title = StringField(required=True) + content = StringField(required=True) + users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + relation_name='users_blogs') + + def __unicode__(self): + return self.title + + +# Test case to see if user-group is working +class TestCustomUserM2M(APITestCase): + collections = [Newuser, Newgroup, UserGroups, Blog] + + def setUp(self) -> None: + os.environ['USER_COLLECTION'] = 'Newuser' + os.environ['GROUP_COLLECTION'] = 'Newgroup' + 
os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newgroup' + super().setUp() + self.group = Newgroup.create(name='Power Users', slug='power-users') + self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', users=[self.user], token=self.secret) + self.token, self.exp = Newuser.api_login("test", "abc123") + + + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog.", + "user": self.user.ref.id()}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog.", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # print(f'\n\nRESPONSE: {res.json}\n\n') + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) From fcad4d70716cb65931b024f7cbd313a1db67d004 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 12 Dec 2022 10:39:19 +0800 Subject: [PATCH 193/214] Fixed create permissions on m2m operations --- pfunk/collection.py | 2 +- pfunk/contrib/auth/resources.py | 9 +- .../test_custom_user_group_users_perms.py | 4 +- pfunk/tests/test_m2m.py | 90 ++++++++++--------- pfunk/web/views/json.py | 47 +++++----- 5 files changed, 76 insertions(+), 76 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 171a1b9..9632a0b 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -81,7 +81,7 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass): def __str__(self): try: return self.__unicode__() # pragma: no cover - except AttributeError: + except (AttributeError, TypeError): return f"{self.__class__.__name__} object" # pragma: no cover diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 00848ea..2a8be01 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -212,7 +212,10 @@ def get_privileges(self): { "resource": q.index(self.get_relation_index_name()), "actions": { - "read": True + "read": True, + "create": True, + "update": True, + "delete": True } }, { @@ -448,8 +451,8 @@ def get_lambda(self, resource_type): ) ) elif resource_type == 'create': - lambda_args = ["new_object"] - obj_ref = q.var('new_object') + # 
Create ops will always be allowed + return True elif resource_type == 'read' or resource_type == 'delete': lambda_args = ["object_ref"] obj_ref = q.var('object_ref') diff --git a/pfunk/tests/test_custom_user_group_users_perms.py b/pfunk/tests/test_custom_user_group_users_perms.py index 45495ec..fb2c58c 100644 --- a/pfunk/tests/test_custom_user_group_users_perms.py +++ b/pfunk/tests/test_custom_user_group_users_perms.py @@ -83,8 +83,8 @@ def test_create(self): blog.title for blog in Blog.all()]) def test_update(self): - self.assertNotIn("the updated street somewhere", [ - house.address for house in Blog.all()]) + self.assertNotIn("updated blog", [ + blog.title for blog in Blog.all()]) res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', json={ "title": "updated blog", diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_m2m.py index a200517..5eecedd 100644 --- a/pfunk/tests/test_m2m.py +++ b/pfunk/tests/test_m2m.py @@ -50,60 +50,64 @@ def setUp(self) -> None: self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) + self.user2 = Newuser.create(username='test2', email='tlasso2@example.org', first_name='Juliuz', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) self.blog = Blog.create( title='test_blog', content='test content', users=[self.user], token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") - def test_read(self): - res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token}) - self.assertTrue(res.status_code, 200) - self.assertEqual("test_blog", res.json['data']['data']['title']) - - def test_read_all(self): - res = self.c.get(f'/json/blog/list/', - headers={ - "Authorization": self.token}) - self.assertTrue(res.status_code, 200) - - def test_create(self): - self.assertNotIn("new blog", [ - blog.title for blog in Blog.all()]) - res = self.c.post('/json/blog/create/', - json={ - "title": "new blog", - "content": "I created a new blog.", - "user": self.user.ref.id()}, - headers={ - "Authorization": self.token}) - - self.assertTrue(res.status_code, 200) - self.assertIn("new blog", [ - blog.title for blog in Blog.all()]) + # def test_read(self): + # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + # self.assertEqual("test_blog", res.json['data']['data']['title']) - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": "updated blog", - # "content": "I updated my blog.", - # "user": self.user.ref.id()}, + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', # headers={ # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "users": [self.user.ref.id(), self.user2.ref.id()]}, + # headers={ + # "Authorization": self.token}) - # print(f'\n\nRESPONSE: {res.json}\n\n') # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ + # self.assertIn("new blog", [ # blog.title for blog in Blog.all()]) - def test_delete(self): - 
res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) + def test_update(self): + self.assertNotIn("updated blog", [ + blog.title for blog in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog.", + "users": [self.user.ref.id()] + }, + headers={ + "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.status_code, 200) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index c679b01..9ca3dca 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -1,3 +1,5 @@ +from valley.utils import import_util + from pfunk.client import q from pfunk.web.response import JSONResponse, JSONNotFoundResponse, JSONBadRequestResponse, \ JSONMethodNotAllowedResponse, JSONUnauthorizedResponse, JSONForbiddenResponse @@ -76,34 +78,25 @@ class CreateView(UpdateMixin, JSONActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ - obj = self.collection.create( - **self.get_query_kwargs(), _token=self.request.token) - return obj - - def get_m2m_kwargs(self, obj): - """ Acquires the keyword-arguments for the many-to-many relationship - - FaunaDB is only able to create a many-to-many relationship - by creating a collection that references both of the object. - So, when creating an entity, it is needed to create an entity to - make them related to each other. 
- - Args: - obj (dict, required): - - """ - data = self.request.get_json() - fields = self.collection.get_foreign_fields_by_type( - 'pfunk.fields.ManyToManyField') + data = self.get_query_kwargs() + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') for k, v in fields.items(): - current_value = data.get(k) - col = v.get('foreign_class')() - client = col().client() - client.query( - q.create( - - ) - ) + col = import_util(v['foreign_class']) + entities = [] + for ref in data[k]: + c = col.get(ref) + + # # TODO: instantiate collection by just passsing the ref + # col_data = {'_ref': ref} + # c = col(**col_data) + # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') + # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) + # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') + # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') + entities.append(c) + data[k] = entities + obj = self.collection.create(**data, _token=self.request.token) + return obj def _payload_docs(self): # Reference the collection by default From 65eda3ba4e5dc9b09280c19cf067c83307830cfe Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 12 Dec 2022 11:25:26 +0800 Subject: [PATCH 194/214] Fixed update ops on m2m not working when adding m2m entities --- pfunk/contrib/auth/resources.py | 1 - pfunk/tests/test_m2m.py | 69 ++++++++++++++++----------------- pfunk/web/views/json.py | 23 +++++++++-- 3 files changed, 54 insertions(+), 39 deletions(-) diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 2a8be01..38395f6 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -426,7 +426,6 @@ def get_lambda(self, resource_type): if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] obj_ref = q.var('old_object') - # BUG: Returning error 'NoneType' object has no attribute 'relation_field' return q.query( q.lambda_(lambda_args, q.and_( diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_m2m.py index 5eecedd..b1520a4 100644 --- a/pfunk/tests/test_m2m.py +++ b/pfunk/tests/test_m2m.py @@ -58,33 +58,33 @@ def setUp(self) -> None: self.token, self.exp = Newuser.api_login("test", "abc123") - # def test_read(self): - # res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - # self.assertEqual("test_blog", res.json['data']['data']['title']) - - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "users": [self.user.ref.id(), self.user2.ref.id()]}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) + def test_read(self): + res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertEqual("test_blog", res.json['data']['data']['title']) + + def test_read_all(self): + res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + 
blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog.", + "users": [self.user.ref.id(), self.user2.ref.id()]}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) def test_update(self): self.assertNotIn("updated blog", [ @@ -98,16 +98,15 @@ def test_update(self): headers={ "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) self.assertIn("updated blog", [ blog.title for blog in Blog.all()]) - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) - # self.assertTrue(res.status_code, 200) + self.assertTrue(res.status_code, 200) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 9ca3dca..3cc590f 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -121,9 +121,26 @@ class UpdateView(UpdateMixin, JSONIDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ - obj = self.collection.get(self.request.kwargs.get( - 'id'), _token=self.request.token) - obj._data.update(self.get_query_kwargs()) + data = self.get_query_kwargs() + obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + col = import_util(v['foreign_class']) + entities = [] + for ref in data[k]: + c = col.get(ref) + + # # TODO: instantiate collection by just passsing the ref + # col_data = {'_ref': ref} + # c = col(**col_data) + # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') + # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) + # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') + # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') + entities.append(c) + data[k] = entities + + obj._data.update(data) obj.save() return obj From 653549e2af4676430567f717fc6baefa487e449e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 15 Dec 2022 11:35:44 +0800 Subject: [PATCH 195/214] Found a better fix for permission error on m2m relation saving --- pfunk/collection.py | 4 +- pfunk/contrib/auth/resources.py | 29 +++++-- ...t_m2m.py => test_custom_user_group_m2m.py} | 1 + pfunk/web/views/json.py | 86 +++---------------- 4 files changed, 40 insertions(+), 80 deletions(-) rename pfunk/tests/{test_m2m.py => test_custom_user_group_m2m.py} (99%) diff --git a/pfunk/collection.py b/pfunk/collection.py index 9632a0b..473f0f8 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -1,5 +1,5 @@ from envs import env -from faunadb.errors import BadRequest +from faunadb.errors import BadRequest, Unauthorized, PermissionDenied from valley.contrib import Schema from valley.declarative import DeclaredVars, DeclarativeVariablesMetaclass from valley.properties import BaseProperty, CharProperty, ListProperty @@ -423,7 +423,7 @@ def _save_related(self, relational_data, _token=None) -> None: } ) ) - except BadRequest: + except (BadRequest) as err: pass def call_signals(self, name): diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 
38395f6..01f04e2 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,3 +1,4 @@ +from valley.utils import import_util from tokenize import group from envs import env @@ -212,10 +213,7 @@ def get_privileges(self): { "resource": q.index(self.get_relation_index_name()), "actions": { - "read": True, - "create": True, - "update": True, - "delete": True + "read": True } }, { @@ -397,11 +395,32 @@ def get_lambda(self, resource_type): class GenericUserBasedRoleM2M(GenericAuthorizationRole): """ Generic set of permissions for many-to-many entity to user relationship """ + def get_privileges(self): + """ Usage of parent `get_privileges()` with addition of access to M2M collection """ + priv_list = super().get_privileges() + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + foreign_col = self.collection._base_properties.get(k) + relation_name = foreign_col.relation_name + if relation_name: + priv_list.extend([ + { + "resource": q.collection(relation_name), + "actions": { + 'read': True, + 'create': True, + 'update': False, + 'delete': False + } + } + ]) + return priv_list + def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' def get_relation_index_name(self): - """ Returns the index name of the m2m index of group and user e.g. 'users_blogs_by_blog_and_newuser' """ + """ Returns the index name of the m2m index of an entity and user e.g. 'users_blogs_by_blog_and_newuser' """ user_field = self.collection.get_user_field() if user_field: user_field = user_field.lower() diff --git a/pfunk/tests/test_m2m.py b/pfunk/tests/test_custom_user_group_m2m.py similarity index 99% rename from pfunk/tests/test_m2m.py rename to pfunk/tests/test_custom_user_group_m2m.py index b1520a4..3d7f2d1 100644 --- a/pfunk/tests/test_m2m.py +++ b/pfunk/tests/test_custom_user_group_m2m.py @@ -82,6 +82,7 @@ def test_create(self): headers={ "Authorization": self.token}) + print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) self.assertIn("new blog", [ blog.title for blog in Blog.all()]) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index 3cc590f..a59a34d 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -28,46 +28,17 @@ def get_response(self): headers=self.get_headers() ) - def _payload_docs(self): - """ Used in custom defining payload parameters for the view in Swagger generation. - - Should return a dict that has the fields of a swagger parameter. - If there is an error in the swagger, it will not be raised. - Usage of `https://editor.swagger.io` to validate is recommended - e.g. 
- ``` - # Defining formdata - {"data": [ - { - "name":"name", - "in":"formData", - "description":"name of the pet", - "required": true, - "type": "string" - }, - { - "name": "status", - "in": "formData", - "description": "status of the pet", - "required":true, - "type":"string" - } - ]} - - # Defining a payload that references a model - {"data": [ - { - "name": "body", - "in": "body", - "description": "Collection object to add", - "required": True, - "schema": "#/definitions/Person" - } - ]} - ``` - """ - return {} - + def get_req_with_m2m(self, data): + """ Returns request with updated params that has the proper m2m entities """ + fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + for k, v in fields.items(): + col = import_util(v['foreign_class']) + entities = [] + for ref in data[k]: + c = col.get(ref) + entities.append(c) + data[k] = entities + return data class CreateView(UpdateMixin, JSONActionMixin, JSONView): @@ -79,22 +50,7 @@ class CreateView(UpdateMixin, JSONActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ data = self.get_query_kwargs() - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') - for k, v in fields.items(): - col = import_util(v['foreign_class']) - entities = [] - for ref in data[k]: - c = col.get(ref) - - # # TODO: instantiate collection by just passsing the ref - # col_data = {'_ref': ref} - # c = col(**col_data) - # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') - # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) - # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') - # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') - entities.append(c) - data[k] = entities + data = self.get_req_with_m2m(data) obj = self.collection.create(**data, _token=self.request.token) return obj @@ -122,24 +78,8 @@ class UpdateView(UpdateMixin, JSONIDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ data = self.get_query_kwargs() + data = self.get_req_with_m2m(data) obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') - for k, v in fields.items(): - col = import_util(v['foreign_class']) - entities = [] - for ref in data[k]: - c = col.get(ref) - - # # TODO: instantiate collection by just passsing the ref - # col_data = {'_ref': ref} - # c = col(**col_data) - # # print(f'\n\nCOLLECTION AND REF {c.get_collection_name()} -> {ref}\n\n') - # # c._ref = q.ref(q.collection(c.get_collection_name()), ref) - # # print(f'\n\nCOLLECTION ID: {c._id}\n\n') - # print(f'\n\nCOLLECTION REF ID: {c.ref}\n\n') - entities.append(c) - data[k] = entities - obj._data.update(data) obj.save() return obj From 531309102188a1390a9daa8e0e89019096161665 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 15 Dec 2022 11:40:55 +0800 Subject: [PATCH 196/214] Fixed wrong imports on test m2m --- pfunk/tests/test_custom_user_group_m2m.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/pfunk/tests/test_custom_user_group_m2m.py b/pfunk/tests/test_custom_user_group_m2m.py index 3d7f2d1..a92663e 100644 --- a/pfunk/tests/test_custom_user_group_m2m.py +++ b/pfunk/tests/test_custom_user_group_m2m.py @@ -12,16 +12,16 @@ class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', 
relation_name='custom_users_groups') class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_m2m.UserGroups') - group_class = import_util('pfunk.tests.test_m2m.Newgroup') + user_group_class = import_util('pfunk.tests.test_custom_user_group_m2m.UserGroups') + group_class = import_util('pfunk.tests.test_custom_user_group_m2m.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_m2m.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_m2m.Blog', + 'pfunk.tests.test_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Blog', relation_name='users_blogs') @@ -29,7 +29,7 @@ class Blog(Collection): collection_roles = [GenericUserBasedRoleM2M] title = StringField(required=True) content = StringField(required=True) - users = ManyToManyField('pfunk.tests.test_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -43,8 +43,8 @@ class TestCustomUserM2M(APITestCase): def setUp(self) -> None: os.environ['USER_COLLECTION'] = 'Newuser' os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_m2m.Newgroup' + os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newuser' + os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', @@ -82,7 +82,6 @@ def test_create(self): headers={ "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') self.assertTrue(res.status_code, 200) self.assertIn("new blog", [ blog.title for blog in Blog.all()]) From d2e3fcffa45827e63be925961e0ff0d3475cd1fa Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 23 Dec 2022 10:26:12 +0800 Subject: [PATCH 197/214] updated unittests to work with latest contrib collections --- pfunk/contrib/auth/collections.py | 2 +- pfunk/contrib/auth/resources.py | 2 +- pfunk/project.py | 5 +- pfunk/tests/test_auth.py | 5 +- .../test_custom_user_group_group_perms.py | 3 +- pfunk/tests/test_deployment.py | 9 +- pfunk/tests/test_email.py | 7 +- pfunk/tests/test_jwt.py | 5 +- pfunk/tests/test_user_subclass_m2m.py | 116 ------------------ pfunk/tests/test_web_crud.py | 5 +- pfunk/tests/test_web_json_change_password.py | 5 +- pfunk/tests/test_web_json_crud.py | 5 +- pfunk/tests/test_web_json_forgot_password.py | 5 +- pfunk/tests/test_web_json_login.py | 5 +- pfunk/tests/test_web_json_signup.py | 5 +- pfunk/tests/test_web_json_stripe.py | 9 +- 16 files changed, 32 insertions(+), 161 deletions(-) delete mode 100644 pfunk/tests/test_user_subclass_m2m.py diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 2d64d5f..8ce669b 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -393,7 +393,7 @@ def add_permissions(self, group, permissions: list, _token=None): index_name = 'usergroups_by_userID_and_groupID' for i in permissions: - perm_list.append(i) + perm_list.extend(i.permissions) if not self.user_group_class: raise NotImplementedError diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 01f04e2..f637f10 100644 --- a/pfunk/contrib/auth/resources.py +++ 
b/pfunk/contrib/auth/resources.py @@ -327,7 +327,7 @@ def get_lambda(self, resource_type): 2. You have the create permission to perform the action (create, read, write, and delete) """ current_group_field = self.collection.get_group_field().lower() - perm = f'{resource_type}'.lower() + perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() if resource_type == 'write': group_ref = q.select(current_group_field, diff --git a/pfunk/project.py b/pfunk/project.py index 528caee..1f46e17 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -205,10 +205,7 @@ def publish(self, mode: str = 'merge') -> int: auth=BearerAuth(secret), data=gql_io ) - - print(f'\n') - print(self.render()) - print('----------------------------------------\n') + if resp.status_code == 200: test_mode = env('PFUNK_TEST_MODE', False, var_type='boolean') if not test_mode: diff --git a/pfunk/tests/test_auth.py b/pfunk/tests/test_auth.py index 9162aa4..91b2e47 100644 --- a/pfunk/tests/test_auth.py +++ b/pfunk/tests/test_auth.py @@ -1,15 +1,14 @@ from faunadb.errors import PermissionDenied from pfunk.contrib.auth.key import PermissionGroup -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.exceptions import LoginFailed from pfunk.testcase import CollectionTestCase from pfunk.tests import Sport, Person, House class AuthTestCase(CollectionTestCase): - collections = [User, Group, + collections = [User, Group, UserGroups, Sport, Person, House] def setUp(self) -> None: diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_custom_user_group_group_perms.py index 2f36603..6f5dd33 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ b/pfunk/tests/test_custom_user_group_group_perms.py @@ -4,6 +4,7 @@ from valley.utils import import_util from pprint import pprint as p +from pfunk.contrib.auth.key import PermissionGroup from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups from pfunk.testcase import APITestCase from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField @@ -48,7 +49,7 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) perms = self.user.add_permissions( - self.group, ['create', 'read', 'write', 'delete']) + self.group, [PermissionGroup(Blog, ['create', 'read', 'write', 'delete'])]) self.token, self.exp = Newuser.api_login("test_user", "abc123") self.raw_token = Newuser.login("test_user", "abc123") self.blog = Blog.create( diff --git a/pfunk/tests/test_deployment.py b/pfunk/tests/test_deployment.py index b354061..a85ae03 100644 --- a/pfunk/tests/test_deployment.py +++ b/pfunk/tests/test_deployment.py @@ -1,6 +1,5 @@ from pfunk.client import q -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.project import Project from pfunk.testcase import PFunkTestCase from pfunk.tests import Sport, Person @@ -11,7 +10,7 @@ class DeploymentTestCase(PFunkTestCase): def setUp(self) -> None: super(DeploymentTestCase, self).setUp() self.project = Project() - self.project.add_resources([User, Group, Sport, Person]) + self.project.add_resources([User, Group, Sport, Person, UserGroups]) def test_project_publish(self): # Make sure collections are created @@ -24,7 
+23,7 @@ def test_project_publish(self): q.paginate(q.collections(q.database(self.db_name))) ).get('data') - self.assertEqual(5, len(collections_after)) + self.assertEqual(6, len(collections_after)) # Make sure functions are created functions = self.client.query( q.paginate(q.functions(q.database(self.db_name))) @@ -36,7 +35,7 @@ def test_project_publish(self): indexes = self.client.query( q.paginate(q.indexes(q.database(self.db_name))) ).get('data') - self.assertEqual(13, len(indexes)) + self.assertEqual(15, len(indexes)) # Add User and Group to the project self.project.add_resources([User, Group]) # Publish twice more to make sure there are no errors with create_or_update_role or create_or_update_function diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index 27e899c..a841463 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ -4,15 +4,14 @@ from jinja2.exceptions import TemplateNotFound from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.contrib.email.base import EmailBackend from pfunk.contrib.email.ses import SESBackend from pfunk.testcase import APITestCase class TestEmailBackend(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestEmailBackend, self).setUp() @@ -44,7 +43,7 @@ def test_get_body(self): class TestEmailSES(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestEmailSES, self).setUp() diff --git a/pfunk/tests/test_jwt.py b/pfunk/tests/test_jwt.py index 15c3bdd..7d57c7f 100644 --- a/pfunk/tests/test_jwt.py +++ b/pfunk/tests/test_jwt.py @@ -1,12 +1,11 @@ from pfunk.contrib.auth.key import Key -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase class AuthToken(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(AuthToken, self).setUp() diff --git a/pfunk/tests/test_user_subclass_m2m.py b/pfunk/tests/test_user_subclass_m2m.py deleted file mode 100644 index a3d4972..0000000 --- a/pfunk/tests/test_user_subclass_m2m.py +++ /dev/null @@ -1,116 +0,0 @@ -# test_dev.py - a placeholder test for fixing User - Group circular import errors - -import os -from valley.utils import import_util -from pprint import pprint as p - -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser -from pfunk.testcase import APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole - - -class UserGroups(Collection): - collection_name = 'users_groups' - userID = ReferenceField('pfunk.tests.test_user_subclass.Newuser') - groupID = ReferenceField('pfunk.tests.test_user_subclass.Newgroup') - permissions = ListField() - - -class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_user_subclass.Newuser', - relation_name='custom_users_groups') - blogs = ReferenceField('pfunk.tests.test_user_subclass.Blog', - 
relation_name='newgroup_blogs') - - - -class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_user_subclass.UserGroups') - group_class = import_util('pfunk.tests.test_user_subclass.Newgroup') - groups = ManyToManyField( - 'pfunk.tests.test_user_subclass.Newgroup', relation_name='custom_users_groups') - - -class Blog(Collection): - collection_roles = [GenericUserBasedRole] - title = StringField(required=True) - content = StringField(required=True) - group = ReferenceField('pfunk.tests.test_user_subclass.Newgroup', - relation_name='newgroup_blogs') - - def __unicode__(self): - return self.title - - -# Test case to see if user-group is working -class TestUserGroupError(APITestCase): - collections = [Newuser, Newgroup, UserGroups, Blog] - - def setUp(self) -> None: - super().setUp() - self.group = Newgroup.create(name='Power Users', slug='power-users') - self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) - p(f'@Test Setup: User Created: {self.user.__dict__}') - self.blog = Blog.create( - title='test_blog', content='test content', group=[self.group], token=self.secret) - self.token, self.exp = Newuser.api_login("test", "abc123") - # p(f'@Test Setup: Blog Created: {self.blog.__dict__}\n') - # p(f'@Test Setup: User Created: {self.user.__dict__}') - - - def test_read(self): - res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token}) - print(f'\n\nRESPONSE: {res.json}\n\n') - self.assertTrue(res.status_code, 200) - self.assertEqual("test_blog", res.json['data']['data']['title']) - - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "user": self.user.ref.id()}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) - - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": "updated blog", - # "content": "I updated my blog.", - # "user": self.user.ref.id()}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ - # blog.title for blog in Blog.all()]) - - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) - - # self.assertTrue(res.status_code, 200) diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py index e93251c..fbf1f43 100644 --- a/pfunk/tests/test_web_crud.py +++ b/pfunk/tests/test_web_crud.py @@ -1,13 +1,12 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase from pfunk.tests import House class TestWebCrud(APITestCase): - collections = [User, Group, House] + collections = [User, Group, 
UserGroups, House] def setUp(self) -> None: super(TestWebCrud, self).setUp() diff --git a/pfunk/tests/test_web_json_change_password.py b/pfunk/tests/test_web_json_change_password.py index 6b9a9f0..cb8dae0 100644 --- a/pfunk/tests/test_web_json_change_password.py +++ b/pfunk/tests/test_web_json_change_password.py @@ -1,10 +1,9 @@ -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase class TestWebChangePassword(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebChangePassword, self).setUp() diff --git a/pfunk/tests/test_web_json_crud.py b/pfunk/tests/test_web_json_crud.py index 48d77bf..6179e41 100644 --- a/pfunk/tests/test_web_json_crud.py +++ b/pfunk/tests/test_web_json_crud.py @@ -1,13 +1,12 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase from pfunk.tests import House class TestWebCrud(APITestCase): - collections = [User, Group, House] + collections = [User, Group, House, UserGroups] def setUp(self) -> None: super(TestWebCrud, self).setUp() diff --git a/pfunk/tests/test_web_json_forgot_password.py b/pfunk/tests/test_web_json_forgot_password.py index 64f8603..6dbdaa2 100644 --- a/pfunk/tests/test_web_json_forgot_password.py +++ b/pfunk/tests/test_web_json_forgot_password.py @@ -1,12 +1,11 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase class TestWebForgotPassword(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebForgotPassword, self).setUp() diff --git a/pfunk/tests/test_web_json_login.py b/pfunk/tests/test_web_json_login.py index f67ce73..862b7d0 100644 --- a/pfunk/tests/test_web_json_login.py +++ b/pfunk/tests/test_web_json_login.py @@ -1,13 +1,12 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase class TestWebLogin(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebLogin, self).setUp() diff --git a/pfunk/tests/test_web_json_signup.py b/pfunk/tests/test_web_json_signup.py index b175ee2..0964414 100644 --- a/pfunk/tests/test_web_json_signup.py +++ b/pfunk/tests/test_web_json_signup.py @@ -1,12 +1,11 @@ from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.testcase import APITestCase class TestWebSignup(APITestCase): - collections = [User, Group] + collections = [User, Group, UserGroups] def setUp(self) -> None: super(TestWebSignup, self).setUp() diff --git a/pfunk/tests/test_web_json_stripe.py b/pfunk/tests/test_web_json_stripe.py index 1158a1d..be45dec 100644 --- a/pfunk/tests/test_web_json_stripe.py +++ b/pfunk/tests/test_web_json_stripe.py @@ -3,8 +3,7 @@ 
from werkzeug.test import Client -from pfunk.contrib.auth.collections import Group -from pfunk.contrib.auth.collections import User +from pfunk.contrib.auth.collections import Group, User, UserGroups from pfunk.contrib.auth.key import PermissionGroup from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer from pfunk.contrib.ecommerce.views import BaseWebhookView @@ -13,7 +12,7 @@ class TestWebStripeCrud(APITestCase): - collections = [User, Group, StripePackage, StripeCustomer] + collections = [User, Group, UserGroups, StripePackage, StripeCustomer] def setUp(self) -> None: super(TestWebStripeCrud, self).setUp() @@ -177,7 +176,7 @@ def test_delete_customer(self): class TestStripeWebhook(APITestCase): - collections = [User, Group, StripeCustomer] + collections = [User, Group, UserGroups, StripeCustomer] def setUp(self) -> None: super(TestStripeWebhook, self).setUp() @@ -262,7 +261,7 @@ def test_receive_post_req(self, mocked): class TestStripeCheckoutView(APITestCase): - collections = [User, Group, StripePackage] + collections = [User, Group, UserGroups, StripePackage] def setUp(self) -> None: super(TestStripeCheckoutView, self).setUp() From 0e6aa26641b68e343b0419fceae2896d86b113b1 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 20 Jan 2023 16:16:46 +0800 Subject: [PATCH 198/214] first stab at using class var instead of env var initially when accessing custom user and group class --- pfunk/collection.py | 11 +- pfunk/contrib/auth/collections.py | 19 ++- pfunk/contrib/auth/resources.py | 17 +-- pfunk/project.py | 7 +- .../test_custom_user_group_group_perms.py | 127 +++++++++--------- pfunk/tests/test_sandbox.py | 38 ++++++ pfunk/tests/unittest_keys.py | 2 + 7 files changed, 139 insertions(+), 82 deletions(-) create mode 100644 pfunk/tests/test_sandbox.py create mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/collection.py b/pfunk/collection.py index 473f0f8..2929b77 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -78,6 +78,12 @@ class Collection(BaseSchema, metaclass=PFunkDeclarativeVariablesMetaclass): 'collection_name'] """List of class variables that are not allowed a field names. """ + """ Optional in-line definition user and group class """ + user_collection = None + group_collection = None + user_collection_dir = None + group_collection_dir = None + def __str__(self): try: return self.__unicode__() # pragma: no cover @@ -132,7 +138,7 @@ def get_user_field(self) -> str: user will be used. 
This is to ensure the permissions to work properly """ fields = self._base_properties.items() - user_class = env('USER_COLLECTION', 'User') + user_class = self.user_collection or env('USER_COLLECTION', 'User') user_field = None for k, v in fields: if user_class in v.get_graphql_type(): @@ -148,7 +154,8 @@ def get_group_field(self) -> str: """ fields = self._base_properties.items() - group_class = env('GROUP_COLLECTION', 'Group') + # TODO: fix not being able to acquire self.group_collection properly and taking env default -> Group instead + group_class = self.group_collection or env('GROUP_COLLECTION', 'Group') group_field = None for k, v in fields: if group_class in v.get_graphql_type(): diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 8ce669b..80c9c2a 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -1,5 +1,6 @@ from cmath import log import uuid +import os from envs import env from faunadb.errors import BadRequest @@ -42,7 +43,16 @@ class UserGroupByUserAndGroupIndex(Index): ] -class UserGroups(Collection): +class BaseUserGroup(Collection): + """ Base UserGroup Collection to subclass from when using custom User and Group """ + collection_indexes = [UserGroupByUserAndGroupIndex] + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" + + +class UserGroups(BaseUserGroup): """ Many-to-many collection of the user-group relationship The native fauna-way of holding many-to-many relationship @@ -63,15 +73,10 @@ class UserGroups(Collection): permissions (str[]): List of permissions, `['create', 'read', 'delete', 'write']` """ - collection_indexes = [UserGroupByUserAndGroupIndex] userID = ReferenceField( env('USER_COLLECTION_DIR', 'pfunk.contrib.auth.collections.User')) groupID = ReferenceField( env('GROUP_COLLECTION_DIR', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) @@ -318,7 +323,7 @@ class ExtendedUser(BaseUser): Provides base methods for group-user permissions. 
If there are no supplied `groups` property, will raise `NotImplementedErrror` """ - user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') + # user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') @classmethod def get_permissions(cls, ref, _token=None): diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index f637f10..36c2064 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -6,8 +6,8 @@ from pfunk.resources import Function, Role # Global collections -USER_CLASS = env('USER_COLLECTION', 'User') -GROUP_CLASS = env('GROUP_COLLECTION', 'Group') +# USER_CLASS = env('USER_COLLECTION', 'User') +# GROUP_CLASS = env('GROUP_COLLECTION', 'Group') class AuthFunction(Function): @@ -188,10 +188,10 @@ def get_relation_index_name(self): return 'usergroups_by_userID_and_groupID' def get_user_table(self): - return USER_CLASS + return self.collection.user_collection or env('USER_COLLECTION', 'User') def get_group_table(self): - return GROUP_CLASS + return self.collection.group_collection or env('GROUP_COLLECTION', 'Group') def get_name_suffix(self): return f'{self.collection.get_user_field().lower()}_based_crud_role' @@ -312,12 +312,9 @@ def get_lambda(self, resource_type): class GenericGroupBasedRole(GenericAuthorizationRole): permissions_field = 'permissions' - user_table = USER_CLASS - group_table = GROUP_CLASS - through_user_field = USER_CLASS.lower() + 'ID' def get_name_suffix(self): - return f'{self.group_table.lower()}_based_crud_role' + return f'{self.get_group_table().lower()}_based_crud_role' def get_lambda(self, resource_type): """ Returns the lambda function for giving the permission to Group-based entities @@ -449,7 +446,7 @@ def get_lambda(self, resource_type): q.lambda_(lambda_args, q.and_( q.equals( - q.select(f'{USER_CLASS.lower()}ID', + q.select(f'{self.get_user_table().lower()}ID', q.select("data", q.get(q.match( q.index( @@ -479,7 +476,7 @@ def get_lambda(self, resource_type): q.lambda_( lambda_args, q.equals( - q.select(f'{USER_CLASS.lower()}ID', + q.select(f'{self.get_user_table().lower()}ID', q.select("data", q.get(q.match( q.index( diff --git a/pfunk/project.py b/pfunk/project.py index 1f46e17..660b927 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -17,7 +17,7 @@ from pfunk.web.request import HTTPRequest, RESTRequest, WSGIRequest from pfunk.web.response import HttpNotFoundResponse, JSONMethodNotAllowedResponse -from .contrib.auth.collections import User, Group, UserGroups, BaseGroup, BaseUser, ExtendedUser +from .contrib.auth.collections import User, Group, UserGroups, BaseGroup, BaseUser, ExtendedUser, BaseUserGroup from .collection import Collection from .fields import ForeignList from .template import graphql_template @@ -193,6 +193,8 @@ def publish(self, mode: str = 'merge') -> int: """ gql_io = BytesIO(self.render().encode()) + print(f'\n\nPUBLISHING PROJ...\n\n') + print(f'\n\n{self.render()}\n\n') if self.client: secret = self.client.secret @@ -224,7 +226,8 @@ def publish(self, mode: str = 'merge') -> int: or issubclass(col, BaseGroup) or issubclass(col, ExtendedUser) or issubclass(col, BaseUser) - or issubclass(col, UserGroups)): + or issubclass(col, UserGroups) + or issubclass(col, BaseUserGroup)): col.publish() collections.remove(col) for col in collections: diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_custom_user_group_group_perms.py index 6f5dd33..858bc9c 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ 
b/pfunk/tests/test_custom_user_group_group_perms.py @@ -1,15 +1,20 @@ -# test_dev.py - a placeholder test for fixing User - Group circular import errors - -import os from valley.utils import import_util from pprint import pprint as p +from unittest import mock +from envs import env +from importlib import reload from pfunk.contrib.auth.key import PermissionGroup -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups from pfunk.testcase import APITestCase +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole +from pfunk.contrib.auth.resources import GenericGroupBasedRole + + +class UserGroups(ug): + userID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup') class Newgroup(BaseGroup): @@ -17,13 +22,11 @@ class Newgroup(BaseGroup): relation_name='custom_users_groups') -class Newuser(ExtendedUser): - group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') - groups = ManyToManyField( - 'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') - - class Blog(Collection): + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' collection_roles = [GenericGroupBasedRole] title = StringField(required=True) content = StringField(required=True) @@ -34,15 +37,17 @@ def __unicode__(self): return self.title -# Test case to see if user-group is working +class Newuser(ExtendedUser): + user_group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.UserGroups') + group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') + groups = ManyToManyField( + 'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') + + class TestCustomGroupBasedPerms(APITestCase): - collections = [Newuser, Newgroup, UserGroups, Blog] + collections = [Newuser, Newgroup, Blog, UserGroups] def setUp(self) -> None: - os.environ['USER_COLLECTION'] = 'Newuser' - os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test_user', email='tlasso@example.org', first_name='Ted', @@ -62,47 +67,47 @@ def test_read(self): self.assertTrue(res.status_code, 200) self.assertEqual("test_blog", res.json['data']['data']['title']) - def test_read_all(self): - res = self.c.get(f'/json/blog/list/', - headers={ - "Authorization": self.token}) - self.assertTrue(res.status_code, 200) - - def test_create(self): - self.assertNotIn("new blog", [ - blog.title for blog in Blog.all()]) - res = self.c.post('/json/blog/create/', - json={ - "title": "new blog", - "content": "I created a new blog.", - "group": self.group.ref.id()}, - headers={ - "Authorization": self.token}) - 
self.assertTrue(res.status_code, 200) - self.assertIn("new blog", [ - blog.title for blog in Blog.all()]) - - def test_update(self): - self.assertNotIn("the updated street somewhere", [ - house.address for house in Blog.all()]) - res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - json={ - "title": "updated blog", - "content": "I updated my blog."}, - headers={ - "Authorization": self.token}) - - self.assertTrue(res.status_code, 200) - self.assertIn("updated blog", [ - blog.title for blog in Blog.all()]) - - def test_delete(self): - res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - headers={ - "Authorization": self.token, - "Content-Type": "application/json" - }) - - self.assertTrue(res.status_code, 200) - self.assertNotIn("test_blog", [ - blog.title for blog in Blog.all()]) + # def test_read_all(self): + # res = self.c.get(f'/json/blog/list/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + + # def test_create(self): + # self.assertNotIn("new blog", [ + # blog.title for blog in Blog.all()]) + # res = self.c.post('/json/blog/create/', + # json={ + # "title": "new blog", + # "content": "I created a new blog.", + # "group": self.group.ref.id()}, + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.status_code, 200) + # self.assertIn("new blog", [ + # blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated street somewhere", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": "updated blog", + # "content": "I updated my blog."}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.status_code, 200) + # self.assertIn("updated blog", [ + # blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.status_code, 200) + # self.assertNotIn("test_blog", [ + # blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_sandbox.py b/pfunk/tests/test_sandbox.py new file mode 100644 index 0000000..91b9426 --- /dev/null +++ b/pfunk/tests/test_sandbox.py @@ -0,0 +1,38 @@ +import unittest +import os +from unittest import mock +from importlib import reload +import sys + +from pfunk.fields import ReferenceField, ManyToManyField + +env_vars = { + 'USER_COLLECTION': 'Newuser', + 'GROUP_COLLECTION': 'Newgroup', + 'USER_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newuser', + 'GROUP_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' +} + + +class TestReloadModule(unittest.TestCase): + + def test_reload_pfunk(self): + import pfunk + ug = pfunk.contrib.auth.collections.UserGroups + for k,v in ug._base_properties.items(): + print(f'K: {k}, V: {dir(v)}\n') + print(f'') + + # mock.patch.dict(os.environ, env_vars) + # pfunk = reload(pfunk) + # # del sys.modules['pfunk'] + # # for x in sys.modules: + # # if 'pfunk' in x: + # # del x + # # del pfunk + + + # ug = pfunk.contrib.auth.collections.UserGroups + # for k,v in ug._base_properties.items(): + # print(f'K: {k}, V: {v.get_graphql_type()}\n') + # print(f'') \ No newline at end of file diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py new file mode 100644 index 0000000..6458d5d --- /dev/null +++ b/pfunk/tests/unittest_keys.py @@ -0,0 +1,2 @@ + +KEYS = 
{'62be9435-dcfd-4b17-902c-c658ad3c1a3d': {'signature_key': 'EjrvrIfA48q1QzwORDFSOYr4pC_wmb4otQXVkrfC9N0=', 'payload_key': 't8D_JPHVZdxLv3gJJlXkV4qNe0Tu8k47rxzUPyEI6Wo=', 'kid': '62be9435-dcfd-4b17-902c-c658ad3c1a3d'}, 'cc89cd63-dbec-4583-b447-282089fb3226': {'signature_key': '3vDXNhE2ulCYC7sh1k9H33U61Eeze-WOsEH__mSOzU4=', 'payload_key': 'YukAkmvpjTwnSl5EI7jry9hcb74OTgp2vS9edYhDpzA=', 'kid': 'cc89cd63-dbec-4583-b447-282089fb3226'}, '490ada85-0ad2-43ad-a6ae-71a59b45e9b1': {'signature_key': '2x3_9dftvlYqEtx3A_RkCwhxlYumdbaUhYM_UJXOmUY=', 'payload_key': 'iy2QIeSdzXByke1pQiHIdLFcChxYJBpp2xt1Z1Y-QI4=', 'kid': '490ada85-0ad2-43ad-a6ae-71a59b45e9b1'}, 'f31a2030-53f4-497f-89a3-ae89cb7465d8': {'signature_key': 'ldUYldicrQ4vHsueYG76YTjsPheV3i8YtpnWQMt60ac=', 'payload_key': '6D-5qwF3qanco2DOoy89q5H741VslLZ6HsdFI7fLQ-s=', 'kid': 'f31a2030-53f4-497f-89a3-ae89cb7465d8'}, 'a2fdd59c-603e-4da1-991a-bdde170a5d6f': {'signature_key': 'LSP9hYwl9Ys5pCzJJ--sDTOyCZij16UPOH9Wf5jAQVo=', 'payload_key': '_dUk0D3p-Ygxyu6SgeUM-yfd-ed-Fyd-t6MikDx9FuQ=', 'kid': 'a2fdd59c-603e-4da1-991a-bdde170a5d6f'}, '3a8cd167-8ae1-4570-96cb-dc876dec4085': {'signature_key': 'EZ35xa4rlf26AKI0-rjhNmAfeqtlSzMR1TD3Q5-1LL8=', 'payload_key': 'm2rXuWNU5uaDmCsP9t0usj3QYMLluR11s_wQz3S34wg=', 'kid': '3a8cd167-8ae1-4570-96cb-dc876dec4085'}, '7beb1a45-2c7d-4d92-a348-538e106c774d': {'signature_key': '52RZafI4c3k-RGYpN_0tWIyrnNFESlxZJDezecGCm3A=', 'payload_key': 'VZynSo0lR2abNH-Dn1EJr_rgLBJCkZNMF0sxmFq6emM=', 'kid': '7beb1a45-2c7d-4d92-a348-538e106c774d'}, '5429e083-7951-40cc-8b52-474e8103df0c': {'signature_key': 'p2K6-AcfExgybkBX1SDeRDO7Z3EXXN7c78tSwiEWI2w=', 'payload_key': 'Mhy6lWX7Ax3nctQZGVDsI9HhjkbECD3bCTuQrpodqRQ=', 'kid': '5429e083-7951-40cc-8b52-474e8103df0c'}, '29e5666f-b5eb-4cad-8010-f097bd732fa6': {'signature_key': '1eb5kJZapr30y78HYciVDoKPWERVIjE9uFDui4UdJYQ=', 'payload_key': 'iOAss1h8Y0LtFSAAkFR5xSUiSRriZqAjExHQIX539AY=', 'kid': '29e5666f-b5eb-4cad-8010-f097bd732fa6'}, '0c0c822d-3784-4158-a101-fd3fee1d2844': {'signature_key': 'FM0YbWTsiay-tK0vuptbNJxkvzHojOQNa-NqDl0howk=', 'payload_key': 'mqBX0y5skZmBvL1GtZk28Y4161XxUNyE8FJ0Ejs0wuU=', 'kid': '0c0c822d-3784-4158-a101-fd3fee1d2844'}} \ No newline at end of file From b4a1bc62b50cf08bab256b54dc735657d9539164 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 23 Jan 2023 15:25:44 +0800 Subject: [PATCH 199/214] Refactored tests to enable use of optional custom user and group in unittests --- pfunk/collection.py | 1 - pfunk/contrib/auth/collections.py | 1 + pfunk/project.py | 2 - .../test_custom_user_group_group_perms.py | 97 +++++++++---------- pfunk/tests/test_custom_user_group_m2m.py | 14 ++- .../test_custom_user_group_users_perms.py | 18 +++- pfunk/tests/unittest_keys.py | 2 - 7 files changed, 71 insertions(+), 64 deletions(-) delete mode 100644 pfunk/tests/unittest_keys.py diff --git a/pfunk/collection.py b/pfunk/collection.py index 2929b77..956229e 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -154,7 +154,6 @@ def get_group_field(self) -> str: """ fields = self._base_properties.items() - # TODO: fix not being able to acquire self.group_collection properly and taking env default -> Group instead group_class = self.group_collection or env('GROUP_COLLECTION', 'Group') group_field = None for k, v in fields: diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index 80c9c2a..b7c9f2c 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -423,6 +423,7 @@ class Group(BaseGroup): class 
User(ExtendedUser): """ A default user that already has predefined M2M relationship with `pfunk.contrib.auth.collections.Group` """ + user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') group_class = import_util('pfunk.contrib.auth.collections.Group') groups = ManyToManyField( 'pfunk.contrib.auth.collections.Group', 'users_groups') diff --git a/pfunk/project.py b/pfunk/project.py index 660b927..dd44c8d 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -193,8 +193,6 @@ def publish(self, mode: str = 'merge') -> int: """ gql_io = BytesIO(self.render().encode()) - print(f'\n\nPUBLISHING PROJ...\n\n') - print(f'\n\n{self.render()}\n\n') if self.client: secret = self.client.secret diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_custom_user_group_group_perms.py index 858bc9c..179a0e4 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ b/pfunk/tests/test_custom_user_group_group_perms.py @@ -1,14 +1,10 @@ from valley.utils import import_util -from pprint import pprint as p -from unittest import mock -from envs import env -from importlib import reload from pfunk.contrib.auth.key import PermissionGroup from pfunk.testcase import APITestCase from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField +from pfunk import Collection, StringField, ReferenceField, ManyToManyField +from pfunk.fields import ManyToManyField, StringField from pfunk.contrib.auth.resources import GenericGroupBasedRole @@ -38,6 +34,7 @@ def __unicode__(self): class Newuser(ExtendedUser): + group_collection = 'Newgroup' user_group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.UserGroups') group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( @@ -67,47 +64,47 @@ def test_read(self): self.assertTrue(res.status_code, 200) self.assertEqual("test_blog", res.json['data']['data']['title']) - # def test_read_all(self): - # res = self.c.get(f'/json/blog/list/', - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - - # def test_create(self): - # self.assertNotIn("new blog", [ - # blog.title for blog in Blog.all()]) - # res = self.c.post('/json/blog/create/', - # json={ - # "title": "new blog", - # "content": "I created a new blog.", - # "group": self.group.ref.id()}, - # headers={ - # "Authorization": self.token}) - # self.assertTrue(res.status_code, 200) - # self.assertIn("new blog", [ - # blog.title for blog in Blog.all()]) - - # def test_update(self): - # self.assertNotIn("the updated street somewhere", [ - # house.address for house in Blog.all()]) - # res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', - # json={ - # "title": "updated blog", - # "content": "I updated my blog."}, - # headers={ - # "Authorization": self.token}) - - # self.assertTrue(res.status_code, 200) - # self.assertIn("updated blog", [ - # blog.title for blog in Blog.all()]) - - # def test_delete(self): - # res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', - # headers={ - # "Authorization": self.token, - # "Content-Type": "application/json" - # }) - - # self.assertTrue(res.status_code, 200) - # self.assertNotIn("test_blog", [ - # blog.title for blog in Blog.all()]) + def test_read_all(self): + 
res = self.c.get(f'/json/blog/list/', + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + + def test_create(self): + self.assertNotIn("new blog", [ + blog.title for blog in Blog.all()]) + res = self.c.post('/json/blog/create/', + json={ + "title": "new blog", + "content": "I created a new blog.", + "group": self.group.ref.id()}, + headers={ + "Authorization": self.token}) + self.assertTrue(res.status_code, 200) + self.assertIn("new blog", [ + blog.title for blog in Blog.all()]) + + def test_update(self): + self.assertNotIn("the updated street somewhere", [ + house.address for house in Blog.all()]) + res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', + json={ + "title": "updated blog", + "content": "I updated my blog."}, + headers={ + "Authorization": self.token}) + + self.assertTrue(res.status_code, 200) + self.assertIn("updated blog", [ + blog.title for blog in Blog.all()]) + + def test_delete(self): + res = self.c.delete(f'/json/blog/delete/{self.blog.ref.id()}/', + headers={ + "Authorization": self.token, + "Content-Type": "application/json" + }) + + self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_custom_user_group_m2m.py b/pfunk/tests/test_custom_user_group_m2m.py index a92663e..379f8bc 100644 --- a/pfunk/tests/test_custom_user_group_m2m.py +++ b/pfunk/tests/test_custom_user_group_m2m.py @@ -2,13 +2,17 @@ import os from valley.utils import import_util -from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug from pfunk.testcase import APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, GenericUserBasedRoleM2M +from pfunk import Collection, StringField, ReferenceField, ManyToManyField +from pfunk.fields import ManyToManyField, StringField +from pfunk.contrib.auth.resources import GenericUserBasedRoleM2M + + +class UserGroups(ug): + userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): diff --git a/pfunk/tests/test_custom_user_group_users_perms.py b/pfunk/tests/test_custom_user_group_users_perms.py index fb2c58c..ddc60b5 100644 --- a/pfunk/tests/test_custom_user_group_users_perms.py +++ b/pfunk/tests/test_custom_user_group_users_perms.py @@ -4,11 +4,16 @@ from valley.utils import import_util from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, UserGroups +from pfunk.contrib.auth.collections import BaseGroup , ExtendedUser, BaseUserGroup as ug from pfunk.testcase import APITestCase -from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField -from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField, ListField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole +from pfunk import Collection, StringField, ReferenceField, ManyToManyField +from pfunk.fields import ManyToManyField, StringField +from 
pfunk.contrib.auth.resources import GenericUserBasedRole + + +class UserGroups(ug): + userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): @@ -17,6 +22,7 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): + group_collection = 'Newgroup' user_group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.UserGroups') group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( @@ -26,6 +32,10 @@ class Newuser(ExtendedUser): class Blog(Collection): + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' collection_roles = [GenericUserBasedRole] title = StringField(required=True) content = StringField(required=True) diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py deleted file mode 100644 index 6458d5d..0000000 --- a/pfunk/tests/unittest_keys.py +++ /dev/null @@ -1,2 +0,0 @@ - -KEYS = {'62be9435-dcfd-4b17-902c-c658ad3c1a3d': {'signature_key': 'EjrvrIfA48q1QzwORDFSOYr4pC_wmb4otQXVkrfC9N0=', 'payload_key': 't8D_JPHVZdxLv3gJJlXkV4qNe0Tu8k47rxzUPyEI6Wo=', 'kid': '62be9435-dcfd-4b17-902c-c658ad3c1a3d'}, 'cc89cd63-dbec-4583-b447-282089fb3226': {'signature_key': '3vDXNhE2ulCYC7sh1k9H33U61Eeze-WOsEH__mSOzU4=', 'payload_key': 'YukAkmvpjTwnSl5EI7jry9hcb74OTgp2vS9edYhDpzA=', 'kid': 'cc89cd63-dbec-4583-b447-282089fb3226'}, '490ada85-0ad2-43ad-a6ae-71a59b45e9b1': {'signature_key': '2x3_9dftvlYqEtx3A_RkCwhxlYumdbaUhYM_UJXOmUY=', 'payload_key': 'iy2QIeSdzXByke1pQiHIdLFcChxYJBpp2xt1Z1Y-QI4=', 'kid': '490ada85-0ad2-43ad-a6ae-71a59b45e9b1'}, 'f31a2030-53f4-497f-89a3-ae89cb7465d8': {'signature_key': 'ldUYldicrQ4vHsueYG76YTjsPheV3i8YtpnWQMt60ac=', 'payload_key': '6D-5qwF3qanco2DOoy89q5H741VslLZ6HsdFI7fLQ-s=', 'kid': 'f31a2030-53f4-497f-89a3-ae89cb7465d8'}, 'a2fdd59c-603e-4da1-991a-bdde170a5d6f': {'signature_key': 'LSP9hYwl9Ys5pCzJJ--sDTOyCZij16UPOH9Wf5jAQVo=', 'payload_key': '_dUk0D3p-Ygxyu6SgeUM-yfd-ed-Fyd-t6MikDx9FuQ=', 'kid': 'a2fdd59c-603e-4da1-991a-bdde170a5d6f'}, '3a8cd167-8ae1-4570-96cb-dc876dec4085': {'signature_key': 'EZ35xa4rlf26AKI0-rjhNmAfeqtlSzMR1TD3Q5-1LL8=', 'payload_key': 'm2rXuWNU5uaDmCsP9t0usj3QYMLluR11s_wQz3S34wg=', 'kid': '3a8cd167-8ae1-4570-96cb-dc876dec4085'}, '7beb1a45-2c7d-4d92-a348-538e106c774d': {'signature_key': '52RZafI4c3k-RGYpN_0tWIyrnNFESlxZJDezecGCm3A=', 'payload_key': 'VZynSo0lR2abNH-Dn1EJr_rgLBJCkZNMF0sxmFq6emM=', 'kid': '7beb1a45-2c7d-4d92-a348-538e106c774d'}, '5429e083-7951-40cc-8b52-474e8103df0c': {'signature_key': 'p2K6-AcfExgybkBX1SDeRDO7Z3EXXN7c78tSwiEWI2w=', 'payload_key': 'Mhy6lWX7Ax3nctQZGVDsI9HhjkbECD3bCTuQrpodqRQ=', 'kid': '5429e083-7951-40cc-8b52-474e8103df0c'}, '29e5666f-b5eb-4cad-8010-f097bd732fa6': {'signature_key': '1eb5kJZapr30y78HYciVDoKPWERVIjE9uFDui4UdJYQ=', 'payload_key': 'iOAss1h8Y0LtFSAAkFR5xSUiSRriZqAjExHQIX539AY=', 'kid': '29e5666f-b5eb-4cad-8010-f097bd732fa6'}, '0c0c822d-3784-4158-a101-fd3fee1d2844': {'signature_key': 'FM0YbWTsiay-tK0vuptbNJxkvzHojOQNa-NqDl0howk=', 'payload_key': 'mqBX0y5skZmBvL1GtZk28Y4161XxUNyE8FJ0Ejs0wuU=', 'kid': '0c0c822d-3784-4158-a101-fd3fee1d2844'}} \ No newline at end of file From 98fd5b5a5db4438b26bfd2563e6bb2e47efd33c5 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 24 Jan 2023 14:58:13 +0800 Subject: [PATCH 
200/214] Updated faunadb module. Removed unnecessary tests. --- pfunk/tests/test_sandbox.py | 38 ------------------------------------- poetry.lock | 12 ++++++------ 2 files changed, 6 insertions(+), 44 deletions(-) delete mode 100644 pfunk/tests/test_sandbox.py diff --git a/pfunk/tests/test_sandbox.py b/pfunk/tests/test_sandbox.py deleted file mode 100644 index 91b9426..0000000 --- a/pfunk/tests/test_sandbox.py +++ /dev/null @@ -1,38 +0,0 @@ -import unittest -import os -from unittest import mock -from importlib import reload -import sys - -from pfunk.fields import ReferenceField, ManyToManyField - -env_vars = { - 'USER_COLLECTION': 'Newuser', - 'GROUP_COLLECTION': 'Newgroup', - 'USER_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newuser', - 'GROUP_COLLECTION_DIR': 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' -} - - -class TestReloadModule(unittest.TestCase): - - def test_reload_pfunk(self): - import pfunk - ug = pfunk.contrib.auth.collections.UserGroups - for k,v in ug._base_properties.items(): - print(f'K: {k}, V: {dir(v)}\n') - print(f'') - - # mock.patch.dict(os.environ, env_vars) - # pfunk = reload(pfunk) - # # del sys.modules['pfunk'] - # # for x in sys.modules: - # # if 'pfunk' in x: - # # del x - # # del pfunk - - - # ug = pfunk.contrib.auth.collections.UserGroups - # for k,v in ug._base_properties.items(): - # print(f'K: {k}, V: {v.get_graphql_type()}\n') - # print(f'') \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 43b717f..e27ee02 100644 --- a/poetry.lock +++ b/poetry.lock @@ -289,7 +289,7 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -cli = ["terminaltables[cli] (>=3.1.10,<4.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "click[cli] (>=8.0.3,<9.0.0)"] +cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] [[package]] name = "executing" @@ -781,8 +781,8 @@ nest-asyncio = "*" traitlets = ">=5.2.2" [package.extras] -test = ["xmltodict", "twine (>=1.11.0)", "testpath", "setuptools (>=60.0)", "pytest-cov (>=2.6.1)", "pytest-asyncio", "pytest (>=4.1)", "pre-commit", "pip (>=18.1)", "nbconvert", "mypy", "ipywidgets", "ipython", "ipykernel", "flake8", "check-manifest", "black"] -sphinx = ["sphinx-book-theme", "Sphinx (>=1.7)", "myst-parser", "moto", "mock", "autodoc-traits"] +sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] +test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] [[package]] name = "nbconvert" @@ -1399,7 +1399,7 @@ executing = "*" pure-eval = "*" [package.extras] -tests = ["cython", "littleutils", "pygments", "typeguard", "pytest"] +tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] [[package]] name = "stripe" @@ -1452,8 +1452,8 @@ python-versions = ">=3.7" webencodings = ">=0.4" [package.extras] -test = ["flake8", "isort", "pytest"] -doc = ["sphinx-rtd-theme", "sphinx"] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] [[package]] name = "tornado" From a52ab9cfe38af32258029217e7a9973865bb6646 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Thu, 26 Jan 2023 16:27:42 +0800 Subject: [PATCH 201/214] Refactored m2m custom user and group tests. 
Refactored names of custom user and group tests --- pfunk/contrib/auth/collections.py | 2 +- ...test_web_custom_user_group_group_perms.py} | 18 ++++++------- ...m.py => test_web_custom_user_group_m2m.py} | 25 ++++++++++--------- ...test_web_custom_user_group_users_perms.py} | 24 ++++++++---------- 4 files changed, 33 insertions(+), 36 deletions(-) rename pfunk/tests/{test_custom_user_group_group_perms.py => test_web_custom_user_group_group_perms.py} (82%) rename pfunk/tests/{test_custom_user_group_m2m.py => test_web_custom_user_group_m2m.py} (80%) rename pfunk/tests/{test_custom_user_group_users_perms.py => test_web_custom_user_group_users_perms.py} (76%) diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index b7c9f2c..fec6f3a 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -331,7 +331,7 @@ def get_permissions(cls, ref, _token=None): def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ - if not self.group_class: + if not self.group_class or not self.user_group_class: raise NotImplementedError group_class_field = self.get_group_field() user_class = self.__class__.__name__.lower() diff --git a/pfunk/tests/test_custom_user_group_group_perms.py b/pfunk/tests/test_web_custom_user_group_group_perms.py similarity index 82% rename from pfunk/tests/test_custom_user_group_group_perms.py rename to pfunk/tests/test_web_custom_user_group_group_perms.py index 179a0e4..38274e5 100644 --- a/pfunk/tests/test_custom_user_group_group_perms.py +++ b/pfunk/tests/test_web_custom_user_group_group_perms.py @@ -9,24 +9,24 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup') + userID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_custom_user_group_group_perms.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_group_perms.Newuser', relation_name='custom_users_groups') class Blog(Collection): user_collection = 'Newuser' group_collection = 'Newgroup' - user_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newuser' - group_collection_dir = 'pfunk.tests.test_custom_user_group_group_perms.Newgroup' + user_collection_dir = 'pfunk.tests.test_web_custom_user_group_group_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup' collection_roles = [GenericGroupBasedRole] title = StringField(required=True) content = StringField(required=True) - group = ReferenceField('pfunk.tests.test_custom_user_group_group_perms.Newgroup', + group = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup', relation_name='newgroup_blogs') def __unicode__(self): @@ -35,10 +35,10 @@ def __unicode__(self): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.UserGroups') - group_class = import_util('pfunk.tests.test_custom_user_group_group_perms.Newgroup') + user_group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.UserGroups') + group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( - 
'pfunk.tests.test_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') + 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') class TestCustomGroupBasedPerms(APITestCase): diff --git a/pfunk/tests/test_custom_user_group_m2m.py b/pfunk/tests/test_web_custom_user_group_m2m.py similarity index 80% rename from pfunk/tests/test_custom_user_group_m2m.py rename to pfunk/tests/test_web_custom_user_group_m2m.py index 379f8bc..3f86e4a 100644 --- a/pfunk/tests/test_custom_user_group_m2m.py +++ b/pfunk/tests/test_web_custom_user_group_m2m.py @@ -11,29 +11,34 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') + userID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newuser') + groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): - user_group_class = import_util('pfunk.tests.test_custom_user_group_m2m.UserGroups') - group_class = import_util('pfunk.tests.test_custom_user_group_m2m.Newgroup') + group_collection = 'Newgroup' + user_group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.UserGroups') + group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Blog', + 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Blog', relation_name='users_blogs') class Blog(Collection): + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'pfunk.tests.test_web_custom_user_group_m2m.Newuser' + group_collection_dir = 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup' collection_roles = [GenericUserBasedRoleM2M] title = StringField(required=True) content = StringField(required=True) - users = ManyToManyField('pfunk.tests.test_custom_user_group_m2m.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -45,10 +50,6 @@ class TestCustomUserM2M(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: - os.environ['USER_COLLECTION'] = 'Newuser' - os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_m2m.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', diff --git a/pfunk/tests/test_custom_user_group_users_perms.py b/pfunk/tests/test_web_custom_user_group_users_perms.py similarity index 76% rename from pfunk/tests/test_custom_user_group_users_perms.py rename to pfunk/tests/test_web_custom_user_group_users_perms.py index ddc60b5..a1b7b0c 100644 --- a/pfunk/tests/test_custom_user_group_users_perms.py +++ b/pfunk/tests/test_web_custom_user_group_users_perms.py @@ -12,34 
+12,34 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newgroup') + userID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser') + groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): - users = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Newuser', + users = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser', relation_name='custom_users_groups') class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.UserGroups') - group_class = import_util('pfunk.tests.test_custom_user_group_users_perms.Newgroup') + user_group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.UserGroups') + group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( - 'pfunk.tests.test_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') - blogs = ManyToManyField('pfunk.tests.test_custom_user_group_users_perms.Blog', + 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Blog', relation_name='users_blogs') class Blog(Collection): user_collection = 'Newuser' group_collection = 'Newgroup' - user_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' - group_collection_dir = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' + user_collection_dir = 'pfunk.tests.test_web_custom_user_group_users_perms.Newuser' + group_collection_dir = 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup' collection_roles = [GenericUserBasedRole] title = StringField(required=True) content = StringField(required=True) - user = ReferenceField('pfunk.tests.test_custom_user_group_users_perms.Newuser', + user = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser', relation_name='users_blogs') def __unicode__(self): @@ -51,10 +51,6 @@ class TestCustomUserBasedPerms(APITestCase): collections = [Newuser, Newgroup, UserGroups, Blog] def setUp(self) -> None: - os.environ['USER_COLLECTION'] = 'Newuser' - os.environ['GROUP_COLLECTION'] = 'Newgroup' - os.environ['USER_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newuser' - os.environ['GROUP_COLLECTION_DIR'] = 'pfunk.tests.test_custom_user_group_users_perms.Newgroup' super().setUp() self.group = Newgroup.create(name='Power Users', slug='power-users') self.user = Newuser.create(username='test', email='tlasso@example.org', first_name='Ted', From b126606419f793e4e099788aff9c73c54dd7c051 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Wed, 1 Mar 2023 21:46:29 +0800 Subject: [PATCH 202/214] updated current branch to reflect latest changes in swagger feature. 
Updated poetry dependencies to be much cleaner --- pfunk/tests/test_project.py | 8 +- pfunk/utils/swagger.py | 4 + pfunk/web/views/json.py | 40 + poetry.lock | 1629 +++++++++++------------------ pyproject.toml | 8 +- 5 files changed, 560 insertions(+), 1129 deletions(-) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index 2665808..f7e97e0 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -29,10 +29,10 @@ def setUp(self) -> None: def tearDown(self) -> None: os.remove("pfunk.json") - try: - os.remove('swagger.yaml') - except FileNotFoundError: - pass + # try: + #     os.remove('swagger.yaml') + # except FileNotFoundError: + #     pass def test_add_resource(self): self.project.add_resource(Person) diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index a765760..9e13625 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -3,6 +3,7 @@ import re import swaggyp as sw +from pfunk.web.views.html import HTMLView from pfunk.collection import Collection from pfunk.utils.routing import parse_rule @@ -163,6 +164,9 @@ def get_operations(self, col: Collection): ``` """ for view in col.collection_views: + # Skip HTML views; they are excluded from the generated Swagger operations + if issubclass(view, HTMLView): + continue route = view.url(col) rule = route.rule methods = route.methods diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index a59a34d..632dc3b 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -28,6 +28,46 @@ def get_response(self): headers=self.get_headers() ) + def _payload_docs(self): + """ Used to define custom payload parameters for the view in Swagger generation. + + Should return a dict containing the fields of a Swagger parameter. + If the generated Swagger spec contains an error, it will not be raised, + so validating the output with `https://editor.swagger.io` is recommended, + e.g. 
+ ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model + {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} + ``` + """ + return {} + def get_req_with_m2m(self, data): """ Returns request with updated params that has the proper m2m entities """ fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') diff --git a/poetry.lock b/poetry.lock index e27ee02..6e82eca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,8 +1,8 @@ [[package]] name = "anyio" -version = "3.6.1" +version = "3.6.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" +category = "main" optional = false python-versions = ">=3.6.2" @@ -13,7 +13,7 @@ sniffio = ">=1.1" [package.extras] doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16)"] +trio = ["trio (>=0.16,<0.22)"] [[package]] name = "appnope" @@ -54,9 +54,20 @@ cffi = ">=1.0.1" dev = ["pytest", "cogapp", "pre-commit", "wheel"] tests = ["pytest"] +[[package]] +name = "arrow" +version = "1.2.3" +description = "Better dates & times for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +python-dateutil = ">=2.7.0" + [[package]] name = "asttokens" -version = "2.0.8" +version = "2.2.1" description = "Annotate AST trees with source code positions" category = "dev" optional = false @@ -81,17 +92,19 @@ six = ">=1.6.1,<2.0" [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] +cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs"] +docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["attrs", "zope.interface"] +tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] +tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] [[package]] name = "backcall" @@ -103,7 +116,7 @@ python-versions = "*" [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.11.2" description = "Screen-scraping 
library" category = "dev" optional = false @@ -131,14 +144,14 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.24.92" +version = "1.26.81" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.27.92,<1.28.0" +botocore = ">=1.29.81,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -147,7 +160,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.27.92" +version = "1.29.81" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -159,7 +172,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.14.0)"] +crt = ["awscrt (==0.16.9)"] [[package]] name = "cachetools" @@ -171,7 +184,7 @@ python-versions = "~=3.5" [[package]] name = "certifi" -version = "2022.9.24" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -190,14 +203,11 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode_backport = ["unicodedata2"] +python-versions = "*" [[package]] name = "click" @@ -212,11 +222,25 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "comm" +version = "0.1.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +traitlets = ">=5.3" + +[package.extras] +test = ["pytest"] [[package]] name = "coverage" @@ -250,7 +274,7 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "debugpy" -version = "1.6.3" +version = "1.6.6" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false @@ -272,14 +296,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." 
-category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "envs" version = "1.4" @@ -293,18 +309,18 @@ cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminalt [[package]] name = "executing" -version = "1.1.1" +version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" category = "dev" optional = false python-versions = "*" [package.extras] -tests = ["rich", "littleutils", "pytest", "asttokens"] +tests = ["asttokens", "pytest", "littleutils", "rich"] [[package]] name = "fastjsonschema" -version = "2.16.2" +version = "2.16.3" description = "Fastest Python implementation of JSON schema" category = "dev" optional = false @@ -315,7 +331,7 @@ devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benc [[package]] name = "faunadb" -version = "4.3.1" +version = "4.5.0" description = "FaunaDB Python driver" category = "main" optional = false @@ -323,7 +339,7 @@ python-versions = "*" [package.dependencies] future = "*" -hyper = "*" +httpx = {version = "*", extras = ["http2"]} iso8601 = "*" requests = "*" @@ -343,9 +359,17 @@ python-versions = ">=3.8,<4.0" Jinja2 = ">=3.1.2,<4.0.0" valley = ">=1.5.8,<2.0.0" +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" + [[package]] name = "future" -version = "0.18.2" +version = "0.18.3" description = "Clean single-source support for Python 3 and 2" category = "main" optional = false @@ -362,48 +386,80 @@ python-versions = "*" [package.dependencies] ply = ">=3.6" +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "h2" -version = "2.6.2" +version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" [package.dependencies] -hpack = ">=2.2,<4" -hyperframe = ">=3.1,<4.0.0 || >4.0.0,<6" +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" [[package]] name = "hpack" -version = "3.0.0" +version = "4.0.0" description = "Pure-Python HPACK header compression" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" [[package]] -name = "hyper" -version = "0.7.0" -description = "HTTP/2 Client for Python" +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.7" [package.dependencies] -h2 = ">=2.4,<3.0" -hyperframe = ">=3.2,<4.0" +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" [package.extras] -fast = ["pycohttpparser"] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "hyperframe" -version = "3.2.0" +version = "6.0.1" description = "HTTP/2 framing layer for Python" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" [[package]] name = "idna" @@ -414,10 +470,10 @@ optional = false python-versions = ">=3.5" [[package]] -name = "importlib-resources" -version = "5.6.0" -description = "Read resources from Python packages" -category = "main" +name = "importlib-metadata" +version = "6.0.0" +description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.7" @@ -425,13 +481,13 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" -version = "5.10.0" +version = "5.12.0" description = "Read resources from Python packages" category = "main" optional = false @@ -441,36 +497,42 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [[package]] name = "ipykernel" -version = "6.16.0" +version = "6.21.2" description = "IPython Kernel for Jupyter" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} -debugpy = ">=1.0" +comm = ">=0.1.1" +debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" -pyzmq = ">=17" +pyzmq = ">=20" tornado = ">=6.1" -traitlets = ">=5.1.0" +traitlets = ">=5.4.0" [package.extras] 
-test = ["flaky", "ipyparallel", "pre-commit", "pytest-cov", "pytest-timeout", "pytest (>=6.0)"] +cov = ["coverage", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "ipython" -version = "8.5.0" +version = "8.11.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -485,15 +547,15 @@ jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" -prompt-toolkit = ">3.0.1,<3.1.0" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "Sphinx (>=1.3)", "ipykernel", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.19)", "pandas", "trio"] +all = ["black", "ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.21)", "pandas", "trio"] black = ["black"] -doc = ["Sphinx (>=1.3)"] +doc = ["ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -501,7 +563,7 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "trio"] +test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "trio"] [[package]] name = "ipython-genutils" @@ -513,7 +575,7 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "8.0.2" +version = "8.0.4" description = "Jupyter interactive widgets" category = "dev" optional = false @@ -537,9 +599,20 @@ category = "main" optional = false python-versions = ">=3.6.2,<4.0" +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +arrow = ">=0.15.0" + [[package]] name = "jedi" -version = "0.18.1" +version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." 
category = "dev" optional = false @@ -549,8 +622,9 @@ python-versions = ">=3.6" parso = ">=0.8.0,<0.9.0" [package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" @@ -574,9 +648,17 @@ category = "main" optional = false python-versions = ">=3.7" +[[package]] +name = "jsonpointer" +version = "2.3" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "jsonschema" -version = "4.16.0" +version = "4.17.3" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false @@ -584,9 +666,17 @@ python-versions = ">=3.7" [package.dependencies] attrs = ">=17.4.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -610,86 +700,130 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "7.4.2" +version = "8.0.3" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] -entrypoints = "*" -jupyter-core = ">=4.9.2" -nest-asyncio = ">=1.5.4" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" -traitlets = "*" +traitlets = ">=5.3" [package.extras] -doc = ["ipykernel", "myst-parser", "sphinx-rtd-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt"] -test = ["codecov", "coverage", "ipykernel (>=6.5)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] +docs = ["ipykernel", "myst-parser", 
"pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinx (>=4)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-console" -version = "6.4.4" +version = "6.6.2" description = "Jupyter terminal console" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -ipykernel = "*" +ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +prompt-toolkit = ">=3.0.30" pygments = "*" +pyzmq = ">=17" +traitlets = ">=5.4" [package.extras] -test = ["pexpect"] +test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "4.11.1" +version = "5.2.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] +platformdirs = ">=2.5" pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = "*" +traitlets = ">=5.3" [package.extras] +docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "jupyter-events" +version = "0.6.3" +description = "Jupyter Event System library" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jsonschema = {version = ">=3.2.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "coverage", "pre-commit", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "pytest (>=7.0)", "rich"] + [[package]] name = "jupyter-server" -version = "1.21.0" +version = "2.3.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] -anyio = ">=3.1.0,<4" +anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.7.0" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-events = ">=0.4.0" +jupyter-server-terminals = "*" nbconvert = ">=6.4.4" -nbformat = ">=5.2.0" +nbformat = ">=5.3.0" packaging = "*" prometheus-client = "*" pywinpty = {version = "*", markers = "os_name == \"nt\""} -pyzmq = ">=17" -Send2Trash = "*" +pyzmq = ">=24" +send2trash = "*" terminado = ">=0.8.3" -tornado = ">=6.1.0" -traitlets = ">=5.1" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" websocket-client = "*" [package.extras] -test = ["coverage", "ipykernel", "pre-commit", "pytest-console-scripts", "pytest-cov", "pytest-mock", "pytest-timeout", "pytest-tornasync", "pytest (>=7.0)", "requests"] +docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["ipykernel", "pre-commit", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "pytest (>=7.0)", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.4.4" +description = "A Jupyter Server Extension Providing Terminals." +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "jupyterlab-pygments" @@ -701,7 +835,7 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "3.0.3" +version = "3.0.5" description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false @@ -709,7 +843,7 @@ python-versions = ">=3.7" [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false @@ -728,7 +862,7 @@ traitlets = "*" [[package]] name = "mistune" -version = "2.0.4" +version = "2.0.5" description = "A sane Markdown parser with useful plugins and renderers" category = "dev" optional = false @@ -736,8 +870,8 @@ python-versions = "*" [[package]] name = "nbclassic" -version = "0.4.5" -description = "A web-based notebook environment for interactive computing" +version = "0.5.2" +description = "Jupyter Notebook as a Jupyter Server extension." 
category = "dev" optional = false python-versions = ">=3.7" @@ -764,29 +898,30 @@ traitlets = ">=4.2.1" [package.extras] docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "pytest-tornasync", "requests-unixsocket"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "pytest-playwright", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "requests-unixsocket"] [[package]] name = "nbclient" -version = "0.7.0" +version = "0.7.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "dev" optional = false python-versions = ">=3.7.0" [package.dependencies] -jupyter-client = ">=6.1.5" -nbformat = ">=5.0" -nest-asyncio = "*" -traitlets = ">=5.2.2" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +nbformat = ">=5.1" +traitlets = ">=5.3" [package.extras] -sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] -test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient", "sphinx-book-theme", "sphinx (>=1.7)"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "pytest (>=7.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" -version = "7.2.1" +version = "7.2.9" description = "Converting Jupyter Notebooks" category = "dev" optional = false @@ -811,17 +946,17 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["ipykernel", "ipython", "ipywidgets (>=7)", "myst-parser", "nbsphinx (>=0.2.12)", "pre-commit", "pyppeteer (>=1,<1.1)", "pyqtwebengine (>=5.15)", "pytest", "pytest-cov", "pytest-dependency", "sphinx-rtd-theme", "sphinx (==5.0.2)", "tornado (>=6.1)"] -docs = ["ipython", "myst-parser", "nbsphinx (>=0.2.12)", "sphinx-rtd-theme", "sphinx (==5.0.2)"] -qtpdf = ["pyqtwebengine (>=5.15)"] +all = ["nbconvert"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["nbconvert"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-cov", "pytest-dependency"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.7.0" +version = "5.7.3" description = "The Jupyter Notebook format" category = "dev" optional = false @@ -834,7 +969,8 @@ jupyter-core = "*" traitlets = ">=5.1" [package.extras] -test = ["check-manifest", "pep440", "pre-commit", "pytest", "testpath"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" @@ -846,7 +982,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.5.1" +version = "6.5.2" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -859,7 +995,7 @@ ipython-genutils = "*" jinja2 = "*" 
jupyter-client = ">=5.3.4" jupyter-core = ">=4.6.1" -nbclassic = "0.4.5" +nbclassic = ">=0.4.7" nbconvert = ">=5" nbformat = "*" nest-asyncio = ">=1.5" @@ -877,7 +1013,7 @@ test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1. [[package]] name = "notebook-shim" -version = "0.2.0" +version = "0.2.2" description = "A shim layer for notebook traits and config" category = "dev" optional = false @@ -887,103 +1023,7 @@ python-versions = ">=3.7" jupyter-server = ">=1.8,<3" [package.extras] -test = ["pytest-tornasync", "pytest-console-scripts", "pytest"] - -[[package]] -name = "openapi-schema-validator" -version = "0.2.3" -description = "OpenAPI schema validation for Python" -category = "main" -optional = false -python-versions = ">=3.7.0,<4.0.0" - -[package.dependencies] -jsonschema = ">=3.0.0,<5.0.0" - -[package.extras] -rfc3339-validator = ["rfc3339-validator"] -strict-rfc3339 = ["strict-rfc3339"] -isodate = ["isodate"] - -[[package]] -name = "openapi-spec-validator" -version = "0.4.0" -description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" -category = "main" -optional = false -python-versions = ">=3.7.0,<4.0.0" - -[package.dependencies] -jsonschema = ">=3.2.0,<5.0.0" -openapi-schema-validator = ">=0.2.0,<0.3.0" -PyYAML = ">=5.1" - -[package.extras] -requests = ["requests"] - -[[package]] -name = "openapi-schema-validator" -version = "0.2.3" -description = "OpenAPI schema validation for Python" -category = "main" -optional = false -python-versions = ">=3.7.0,<4.0.0" - -[package.dependencies] -jsonschema = ">=3.0.0,<5.0.0" - -[package.extras] -rfc3339-validator = ["rfc3339-validator"] -strict-rfc3339 = ["strict-rfc3339"] -isodate = ["isodate"] - -[[package]] -name = "openapi-spec-validator" -version = "0.4.0" -description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" -category = "main" -optional = false -python-versions = ">=3.7.0,<4.0.0" - -[package.dependencies] -jsonschema = ">=3.2.0,<5.0.0" -openapi-schema-validator = ">=0.2.0,<0.3.0" -PyYAML = ">=5.1" - -[package.extras] -requests = ["requests"] - -[[package]] -name = "openapi-schema-validator" -version = "0.2.3" -description = "OpenAPI schema validation for Python" -category = "main" -optional = false -python-versions = ">=3.7.0,<4.0.0" - -[package.dependencies] -jsonschema = ">=3.0.0,<5.0.0" - -[package.extras] -rfc3339-validator = ["rfc3339-validator"] -strict-rfc3339 = ["strict-rfc3339"] -isodate = ["isodate"] - -[[package]] -name = "openapi-spec-validator" -version = "0.4.0" -description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" -category = "main" -optional = false -python-versions = ">=3.7.0,<4.0.0" - -[package.dependencies] -jsonschema = ">=3.2.0,<5.0.0" -openapi-schema-validator = ">=0.2.0,<0.3.0" -PyYAML = ">=5.1" - -[package.extras] -requests = ["requests"] +test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] [[package]] name = "openapi-schema-validator" @@ -1019,14 +1059,11 @@ requests = ["requests"] [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "pandocfilters" @@ -1088,10 +1125,22 @@ python-versions = "*" name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." 
-category = "dev" +category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "platformdirs" +version = "3.0.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.1)"] + [[package]] name = "ply" version = "3.11" @@ -1102,7 +1151,7 @@ python-versions = "*" [[package]] name = "prometheus-client" -version = "0.15.0" +version = "0.16.0" description = "Python client for the Prometheus monitoring system." category = "dev" optional = false @@ -1113,18 +1162,18 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.31" +version = "3.0.38" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" [package.dependencies] wcwidth = "*" [[package]] name = "psutil" -version = "5.9.2" +version = "5.9.4" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false @@ -1152,14 +1201,6 @@ python-versions = "*" [package.extras] tests = ["pytest"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "pycparser" version = "2.21" @@ -1170,7 +1211,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.13.0" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false @@ -1181,32 +1222,21 @@ plugins = ["importlib-metadata"] [[package]] name = "pyjwt" -version = "2.5.0" +version = "2.6.0" description = "JSON Web Token implementation in Python" category = "main" optional = false python-versions = ">=3.7" [package.extras] -crypto = ["cryptography (>=3.3.1)", "types-cryptography (>=3.3.21)"] -dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "types-cryptography (>=3.3.21)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] +crypto = ["cryptography (>=3.4.0)"] +dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["railroad-diagrams", "jinja2"] - [[package]] name = "pyrsistent" -version = "0.18.1" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false @@ -1223,6 +1253,14 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" [package.dependencies] six = ">=1.5" +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "pytz" version = "2021.3" @@ -1233,7 +1271,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "304" +version = "305" description = "Python for Window Extensions" category = "dev" optional = false @@ -1241,7 +1279,7 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "2.0.8" +version = "2.0.10" description = "Pseudo terminal support for Windows from Python." category = "dev" optional = false @@ -1257,7 +1295,7 @@ python-versions = ">=3.6" [[package]] name = "pyzmq" -version = "24.0.1" +version = "25.0.0" description = "Python bindings for 0MQ" category = "dev" optional = false @@ -1265,11 +1303,10 @@ python-versions = ">=3.6" [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} -py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.3.2" +version = "5.4.0" description = "Jupyter Qt console" category = "dev" optional = false @@ -1291,7 +1328,7 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "2.2.1" +version = "2.3.0" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" optional = false @@ -1305,7 +1342,7 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "requests" -version = "2.28.1" +version = "2.28.2" description = "Python HTTP for Humans." 
category = "main" optional = false @@ -1313,7 +1350,7 @@ python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" @@ -1321,6 +1358,39 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "s3transfer" version = "0.6.0" @@ -1373,29 +1443,29 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" [[package]] name = "soupsieve" -version = "2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "stack-data" -version = "0.5.1" +version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" category = "dev" optional = false python-versions = "*" [package.dependencies] -asttokens = "*" -executing = "*" +asttokens = ">=2.1.0" +executing = ">=1.2.0" pure-eval = "*" [package.extras] @@ -1426,7 +1496,7 @@ valley = ">=1.5.6,<2.0.0" [[package]] name = "terminado" -version = "0.16.0" +version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
category = "dev" optional = false @@ -1438,7 +1508,8 @@ pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} tornado = ">=6.1.0" [package.extras] -test = ["pre-commit", "pytest-timeout", "pytest (>=6.0)"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "tinycss2" @@ -1453,7 +1524,7 @@ webencodings = ">=0.4" [package.extras] doc = ["sphinx", "sphinx-rtd-theme"] -test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] +test = ["pytest", "isort", "flake8"] [[package]] name = "tornado" @@ -1465,23 +1536,34 @@ python-versions = ">= 3.7" [[package]] name = "traitlets" -version = "5.5.0" -description = "" +version = "5.9.0" +description = "Traitlets Python configuration system" category = "dev" optional = false python-versions = ">=3.7" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest"] +test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] + +[[package]] +name = "uri-template" +version = "1.2.0" +description = "RFC 6570 URI Template Processor" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["mypy", "flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "pep8-naming"] [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.14" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] @@ -1501,12 +1583,20 @@ envs = ">=1.3,<2.0" [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" category = "dev" optional = false python-versions = "*" +[[package]] +name = "webcolors" +version = "1.12" +description = "A library for working with color names and color values formats defined by HTML and CSS." 
+category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "webencodings" version = "0.5.1" @@ -1517,16 +1607,16 @@ python-versions = "*" [[package]] name = "websocket-client" -version = "1.4.1" +version = "1.5.1" description = "WebSocket client for Python with low level API options" category = "dev" optional = false python-versions = ">=3.7" [package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] test = ["websockets"] -optional = ["wsaccel", "python-socks"] -docs = ["sphinx-rtd-theme (>=0.5)", "Sphinx (>=3.4)"] [[package]] name = "werkzeug" @@ -1544,860 +1634,155 @@ watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "3.6.0" -description = "IPython HTML widgets for Jupyter" +version = "4.0.5" +description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -notebook = ">=4.4.1" - -[[package]] -name = "zipp" -version = "3.7.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false python-versions = ">=3.7" [[package]] name = "zipp" -version = "3.9.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "jaraco.functools", "more-itertools", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" +content-hash = "1eed3385a9e7bfaf61e1c764485f5ebfff46418df4af9b6ee8dd0bb12b429c37" [metadata.files] -appnope = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] -argon2-cffi = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] -argon2-cffi-bindings = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] -asttokens = [ - {file = "asttokens-2.0.5-py2.py3-none-any.whl", hash = "sha256:0844691e88552595a6f4a4281a9f7f79b8dd45ca4ccea82e5e05b4bbdb76705c"}, - {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, -] -astunparse = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = 
"sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] -bleach = [ - {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, - {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, -] -boto3 = [ - {file = "boto3-1.23.8-py3-none-any.whl", hash = "sha256:15733c2bbedce7a36fcf1749560c72c3ee90785aa6302a98658c7bffdcbe1f2a"}, - {file = "boto3-1.23.8.tar.gz", hash = "sha256:ea8ebcea4ccb70d1cf57526d9eec6012c76796f28ada3e9cc1d89178683d8107"}, -] -botocore = [ - {file = "botocore-1.26.8-py3-none-any.whl", hash = "sha256:620851daf1245af5bc28137aa821375bac964aa0eddc482437c783fe01e298fc"}, - {file = "botocore-1.26.8.tar.gz", hash = "sha256:e786722cb14de7319331cc55e9092174de66a768559700ef656d05ff41b3e24f"}, -] -cachetools = [ - {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, - {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, -] -certifi = [ - {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, - {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, -] -cffi = [ - {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, - {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, - {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", 
hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, - {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, - {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, - {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, - {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, - {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, - {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, - {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, - {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, - {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, - {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, - {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, - {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, - {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, - {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = 
"sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, -] -cryptography = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, -] -debugpy = [ - {file = "debugpy-1.6.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:eb1946efac0c0c3d411cea0b5ac772fbde744109fd9520fb0c5a51979faf05ad"}, - {file = "debugpy-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e3513399177dd37af4c1332df52da5da1d0c387e5927dc4c0709e26ee7302e8f"}, - {file = "debugpy-1.6.0-cp310-cp310-win32.whl", hash = 
"sha256:5c492235d6b68f879df3bdbdb01f25c15be15682665517c2c7d0420e5658d71f"}, - {file = "debugpy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:40de9ba137d355538432209d05e0f5fe5d0498dce761c39119ad4b950b51db31"}, - {file = "debugpy-1.6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0d383b91efee57dbb923ba20801130cf60450a0eda60bce25bccd937de8e323a"}, - {file = "debugpy-1.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ff853e60e77e1c16f85a31adb8360bb2d98ca588d7ed645b7f0985b240bdb5e"}, - {file = "debugpy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:8e972c717d95f56b6a3a7a29a5ede1ee8f2c3802f6f0e678203b0778eb322bf1"}, - {file = "debugpy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a8aaeb53e87225141fda7b9081bd87155c1debc13e2f5a532d341112d1983b65"}, - {file = "debugpy-1.6.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:132defb585b518955358321d0f42f6aa815aa15b432be27db654807707c70b2f"}, - {file = "debugpy-1.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ee75844242b4537beb5899f3e60a578454d1f136b99e8d57ac424573797b94a"}, - {file = "debugpy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:a65a2499761d47df3e9ea9567109be6e73d412e00ac3ffcf74839f3ddfcdf028"}, - {file = "debugpy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:bd980d533d0ddfc451e03a3bb32acb2900049fec39afc3425b944ebf0889be62"}, - {file = "debugpy-1.6.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:245c7789a012f86210847ec7ee9f38c30a30d4c2223c3e111829a76c9006a5d0"}, - {file = "debugpy-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e3aa2368883e83e7b689ddff3cafb595f7b711f6a065886b46a96a7fef874e7"}, - {file = "debugpy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:72bcfa97f3afa0064afc77ab811f48ad4a06ac330f290b675082c24437730366"}, - {file = "debugpy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:30abefefd2ff5a5481162d613cb70e60e2fa80a5eb4c994717c0f008ed25d2e1"}, - {file = "debugpy-1.6.0-py2.py3-none-any.whl", hash = "sha256:4de7777842da7e08652f2776c552070bbdd758557fdec73a15d7be0e4aab95ce"}, - {file = "debugpy-1.6.0.zip", hash = "sha256:7b79c40852991f7b6c3ea65845ed0f5f6b731c37f4f9ad9c61e2ab4bd48a9275"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -defusedxml = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] -entrypoints = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] -envs = [ - {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, - {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, -] -executing = [ - {file = "executing-0.8.3-py2.py3-none-any.whl", hash = "sha256:d1eef132db1b83649a3905ca6dd8897f71ac6f8cac79a7e58a1a09cf137546c9"}, - {file = "executing-0.8.3.tar.gz", hash = 
"sha256:c6554e21c6b060590a6d3be4b82fb78f8f0194d809de5ea7df1c093763311501"}, -] -fastjsonschema = [ - {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, - {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, -] -faunadb = [ - {file = "faunadb-4.2.0-py2.py3-none-any.whl", hash = "sha256:73d5f560bddb7fc45f9201d526f97fcd0a7b0ef74ce3d4d46b8e116f4ce6e219"}, -] -future = [ - {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, -] -graphql-py = [ - {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"}, -] -h2 = [ - {file = "h2-2.6.2-py2.py3-none-any.whl", hash = "sha256:93cbd1013a2218539af05cdf9fc37b786655b93bbc94f5296b7dabd1c5cadf41"}, - {file = "h2-2.6.2.tar.gz", hash = "sha256:af35878673c83a44afbc12b13ac91a489da2819b5dc1e11768f3c2406f740fe9"}, -] -hpack = [ - {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"}, - {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"}, -] -hyper = [ - {file = "hyper-0.7.0-py2.py3-none-any.whl", hash = "sha256:069514f54231fb7b5df2fb910a114663a83306d5296f588fffcb0a9be19407fc"}, - {file = "hyper-0.7.0.tar.gz", hash = "sha256:12c82eacd122a659673484c1ea0d34576430afbe5aa6b8f63fe37fcb06a2458c"}, -] -hyperframe = [ - {file = "hyperframe-3.2.0-py2.py3-none-any.whl", hash = "sha256:4dcab11967482d400853b396d042038e4c492a15a5d2f57259e2b5f89a32f755"}, - {file = "hyperframe-3.2.0.tar.gz", hash = "sha256:05f0e063e117c16fcdd13c12c93a4424a2c40668abfac3bb419a10f57698204e"}, -] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -importlib-resources = [ - {file = "importlib_resources-5.7.1-py3-none-any.whl", hash = "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"}, - {file = "importlib_resources-5.7.1.tar.gz", hash = "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3"}, -] -ipykernel = [ - {file = "ipykernel-6.13.0-py3-none-any.whl", hash = "sha256:2b0987af43c0d4b62cecb13c592755f599f96f29aafe36c01731aaa96df30d39"}, - {file = "ipykernel-6.13.0.tar.gz", hash = "sha256:0e28273e290858393e86e152b104e5506a79c13d25b951ac6eca220051b4be60"}, -] -ipython = [ - {file = "ipython-8.3.0-py3-none-any.whl", hash = "sha256:341456643a764c28f670409bbd5d2518f9b82c013441084ff2c2fc999698f83b"}, - {file = "ipython-8.3.0.tar.gz", hash = "sha256:807ae3cf43b84693c9272f70368440a9a7eaa2e7e6882dad943c32fbf7e51402"}, -] -ipython-genutils = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] -ipywidgets = [ - {file = "ipywidgets-7.7.0-py2.py3-none-any.whl", hash = "sha256:e58ff58bc94d481e91ecb6e13a5cb96a87b6b8ade135e055603d0ca24593df38"}, - {file = "ipywidgets-7.7.0.tar.gz", hash = "sha256:ab4a5596855a88b83761921c768707d65e5847068139bc1729ddfe834703542a"}, -] -iso8601 = [ - {file = "iso8601-1.0.2-py3-none-any.whl", hash = 
"sha256:d7bc01b1c2a43b259570bb307f057abc578786ea734ba2b87b836c5efc5bd443"}, - {file = "iso8601-1.0.2.tar.gz", hash = "sha256:27f503220e6845d9db954fb212b95b0362d8b7e6c1b2326a87061c3de93594b1"}, -] -jedi = [ - {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, - {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, -] -jinja2 = [ - {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, - {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, -] -jmespath = [ - {file = "jmespath-1.0.0-py3-none-any.whl", hash = "sha256:e8dcd576ed616f14ec02eed0005c85973b5890083313860136657e24784e4c04"}, - {file = "jmespath-1.0.0.tar.gz", hash = "sha256:a490e280edd1f57d6de88636992d05b71e97d69a26a19f058ecf7d304474bf5e"}, -] -jsonschema = [ - {file = "jsonschema-4.5.1-py3-none-any.whl", hash = "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f"}, - {file = "jsonschema-4.5.1.tar.gz", hash = "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc"}, -] -jupyter = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, -] -jupyter-client = [ - {file = "jupyter_client-7.3.1-py3-none-any.whl", hash = "sha256:404abe552540aff3527e66e16beb114b6b4ff58479d51a301f4eb9701e4f52ef"}, - {file = "jupyter_client-7.3.1.tar.gz", hash = "sha256:05d4ff6a0ade25138c6bb0fbeac7ddc26b5fe835e7dd816b64b4a45b931bdc0b"}, -] -jupyter-console = [ - {file = "jupyter_console-6.4.3-py3-none-any.whl", hash = "sha256:e630bcb682c0088dda45688ad7c2424d4a825c8acf494cb036ced03ed0424841"}, - {file = "jupyter_console-6.4.3.tar.gz", hash = "sha256:55f32626b0be647a85e3217ddcdb22db69efc79e8b403b9771eb9ecc696019b5"}, -] -jupyter-core = [ - {file = "jupyter_core-4.10.0-py3-none-any.whl", hash = "sha256:e7f5212177af7ab34179690140f188aa9bf3d322d8155ed972cbded19f55b6f3"}, - {file = "jupyter_core-4.10.0.tar.gz", hash = "sha256:a6de44b16b7b31d7271130c71a6792c4040f077011961138afed5e5e73181aec"}, -] -jupyterlab-pygments = [ - {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, - {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, -] -jupyterlab-widgets = [ - {file = "jupyterlab_widgets-1.1.0-py3-none-any.whl", hash = "sha256:c2a9bd3789f120f64d73268c066ed3b000c56bc1dda217be5cdc43e7b4ebad3f"}, - {file = "jupyterlab_widgets-1.1.0.tar.gz", hash = "sha256:d5f41bc1713795385f718d44dcba47e1e1473c6289f28a95aa6b2c0782ee372a"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = 
"MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash 
= "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -matplotlib-inline = [ - {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, - {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, -] -mistune = [ - {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, - {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, -] -nbclient = [ - {file = "nbclient-0.6.3-py3-none-any.whl", hash = "sha256:2747ac9b385720d8a6c34f2f71e72cbe64aec6cadaadcc064a4df0b0e99c5874"}, - {file = "nbclient-0.6.3.tar.gz", hash = "sha256:b80726fc1fb89a0e8f8be1e77e28d0026b1e8ed90bc143c8a0c7622e4f8cdd9e"}, -] -nbconvert = [ - {file = "nbconvert-6.5.0-py3-none-any.whl", hash = "sha256:c56dd0b8978a1811a5654f74c727ff16ca87dd5a43abd435a1c49b840fcd8360"}, - {file = "nbconvert-6.5.0.tar.gz", hash = "sha256:223e46e27abe8596b8aed54301fadbba433b7ffea8196a68fd7b1ff509eee99d"}, -] -nbformat = [ - {file = "nbformat-5.4.0-py3-none-any.whl", hash = "sha256:0d6072aaec95dddc39735c144ee8bbc6589c383fb462e4058abc855348152dad"}, - {file = "nbformat-5.4.0.tar.gz", hash = "sha256:44ba5ca6acb80c5d5a500f1e5b83ede8cbe364d5a495c4c8cf60aaf1ba656501"}, -] -nest-asyncio = [ - {file = 
"nest_asyncio-1.5.5-py3-none-any.whl", hash = "sha256:b98e3ec1b246135e4642eceffa5a6c23a3ab12c82ff816a92c612d68205813b2"}, - {file = "nest_asyncio-1.5.5.tar.gz", hash = "sha256:e442291cd942698be619823a17a86a5759eabe1f8613084790de189fe9e16d65"}, -] -notebook = [ - {file = "notebook-6.4.11-py3-none-any.whl", hash = "sha256:b4a6baf2eba21ce67a0ca11a793d1781b06b8078f34d06c710742e55f3eee505"}, - {file = "notebook-6.4.11.tar.gz", hash = "sha256:709b1856a564fe53054796c80e17a67262071c86bfbdfa6b96aaa346113c555a"}, -] -openapi-schema-validator = [ - {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, - {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, -] -openapi-spec-validator = [ - {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, - {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, -] -openapi-schema-validator = [ - {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, - {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, -] -openapi-spec-validator = [ - {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, - {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, -] -openapi-schema-validator = [ - {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, - {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, -] -openapi-spec-validator = [ - {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, - {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, -] -openapi-schema-validator = [ - {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, - {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, -] -openapi-spec-validator = [ - {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, - {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pandocfilters = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, -] -parso = [ - {file = 
"parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] -pdoc = [ - {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] -ply = [ - {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, - {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, -] -prometheus-client = [ - {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, - {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, - {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, -] -psutil = [ - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, - {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, - {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, - {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, - {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, - {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, - {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, - {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, - {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, - {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, - {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, - {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, - {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, - {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, - {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, - {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, - {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, - {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, - {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, - {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, - {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = 
"sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -pure-eval = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, -] -pyjwt = [ - {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, - {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, -] -pywin32 = [ - {file = "pywin32-304-cp310-cp310-win32.whl", hash = "sha256:3c7bacf5e24298c86314f03fa20e16558a4e4138fc34615d7de4070c23e65af3"}, - {file = "pywin32-304-cp310-cp310-win_amd64.whl", hash = "sha256:4f32145913a2447736dad62495199a8e280a77a0ca662daa2332acf849f0be48"}, - {file = "pywin32-304-cp310-cp310-win_arm64.whl", hash = "sha256:d3ee45adff48e0551d1aa60d2ec066fec006083b791f5c3527c40cd8aefac71f"}, - {file = "pywin32-304-cp311-cp311-win32.whl", hash = "sha256:30c53d6ce44c12a316a06c153ea74152d3b1342610f1b99d40ba2795e5af0269"}, - {file = "pywin32-304-cp311-cp311-win_amd64.whl", hash = "sha256:7ffa0c0fa4ae4077e8b8aa73800540ef8c24530057768c3ac57c609f99a14fd4"}, - {file = "pywin32-304-cp311-cp311-win_arm64.whl", hash = "sha256:cbbe34dad39bdbaa2889a424d28752f1b4971939b14b1bb48cbf0182a3bcfc43"}, - {file = "pywin32-304-cp36-cp36m-win32.whl", hash = "sha256:be253e7b14bc601718f014d2832e4c18a5b023cbe72db826da63df76b77507a1"}, - {file = "pywin32-304-cp36-cp36m-win_amd64.whl", hash = "sha256:de9827c23321dcf43d2f288f09f3b6d772fee11e809015bdae9e69fe13213988"}, - {file = "pywin32-304-cp37-cp37m-win32.whl", hash = "sha256:f64c0377cf01b61bd5e76c25e1480ca8ab3b73f0c4add50538d332afdf8f69c5"}, - {file = "pywin32-304-cp37-cp37m-win_amd64.whl", hash = "sha256:bb2ea2aa81e96eee6a6b79d87e1d1648d3f8b87f9a64499e0b92b30d141e76df"}, - {file = "pywin32-304-cp38-cp38-win32.whl", hash = "sha256:94037b5259701988954931333aafd39cf897e990852115656b014ce72e052e96"}, - {file = "pywin32-304-cp38-cp38-win_amd64.whl", hash = 
"sha256:ead865a2e179b30fb717831f73cf4373401fc62fbc3455a0889a7ddac848f83e"}, - {file = "pywin32-304-cp39-cp39-win32.whl", hash = "sha256:25746d841201fd9f96b648a248f731c1dec851c9a08b8e33da8b56148e4c65cc"}, - {file = "pywin32-304-cp39-cp39-win_amd64.whl", hash = "sha256:d24a3382f013b21aa24a5cfbfad5a2cd9926610c0affde3e8ab5b3d7dbcf4ac9"}, -] -pywinpty = [ - {file = "pywinpty-2.0.5-cp310-none-win_amd64.whl", hash = "sha256:f86c76e2881c37e69678cbbf178109f8da1fa8584db24d58e1b9369b0276cfcb"}, - {file = "pywinpty-2.0.5-cp37-none-win_amd64.whl", hash = "sha256:ff9b52f182650cfdf3db1b264a6fe0963eb9d996a7a1fa843ac406c1e32111f8"}, - {file = "pywinpty-2.0.5-cp38-none-win_amd64.whl", hash = "sha256:651ee1467bd7eb6f64d44dbc954b7ab7d15ab6d8adacc4e13299692c67c5d5d2"}, - {file = "pywinpty-2.0.5-cp39-none-win_amd64.whl", hash = "sha256:e59a508ae78374febada3e53b5bbc90b5ad07ae68cbfd72a2e965f9793ae04f3"}, - {file = "pywinpty-2.0.5.tar.gz", hash = "sha256:e125d3f1804d8804952b13e33604ad2ca8b9b2cac92b27b521c005d1604794f8"}, -] -pyyaml = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = 
"PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -pyzmq = [ - {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:176be6c348dbec04e8e0d41e810743b7084b73e50954a6fedeeafc65d7fa9290"}, - {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ef2d1476cea927ba33a29f59aa128ce3b174e81083cbd091dd3149af741c85d"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2394bb857607494c3750b5040f852a1ad7831d7a7907b6106f0af2c70860cef"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fe8807d67456e7cf0e9a33b85e0d05bb9d2977dbdb23977e4cc2b843633618fd"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be3425dfdb9c46dc62d490fc1a6142a5f3dc6605ebb9048ae675056ef621413c"}, - {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda55ff0a7566405fb29ca38db1829fecb4c041b8dc3f91754f337bb7b27cbd8"}, - {file = 
"pyzmq-23.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e4d70d34112997a32c8193fae2579aec854745f8730031e5d84cc579fd98ff"}, - {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f3daabbe42ca31712e29d906dfa4bf1890341d2fd5178de118bc9977a8d2b23b"}, - {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e7ae3e520bd182a0cbfff3cc69dda3a2c26f69847e81bd3f090ed04471fc1282"}, - {file = "pyzmq-23.0.0-cp310-cp310-win32.whl", hash = "sha256:1d480d48253f61ff90115b8069ed32f51a0907eb19101c4a5ae0b9a5973e40ad"}, - {file = "pyzmq-23.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7eca5902ff41575d9a26f91fc750018b7eb129600ea600fe69ce852fbdfab4e2"}, - {file = "pyzmq-23.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b2a4af5e6fa85ee1743c725b46579f8de0b97024eb5ae1a0b5c5711adc436665"}, - {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591b455546d34bb96aa453dd9666bddb8c81314e23dbf2606f9614acf7e73d9f"}, - {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdd008629293a0d4f00b516841ac0df89f17a64bc2d83bcfa48212d3f3b3ca1a"}, - {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:df0b05fa4321b090abe5601dea9b1c8933c06f496588ccb397a0b1f9dfe32ebe"}, - {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:12a53f5c13edf12547ce495afebdd5ab11c1b67ea078a941b21e13161783741a"}, - {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:cb45b7ea577283b547b907a3389d62ca2eaddaf725fbb79cd360802440fa9c91"}, - {file = "pyzmq-23.0.0-cp36-cp36m-win32.whl", hash = "sha256:0a787f7870cba38d655c68ea7ae14bb6c3e9e19bb618d0c2412513321eeaeb80"}, - {file = "pyzmq-23.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:536491ad640448f14d8aa2dc497c354a348f216eb23513bf5aa0ac40e2b02577"}, - {file = "pyzmq-23.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5eaf7e0841d3d8d1d92838c8b56f98cb9bf35b14bcbe4efa281e4812ef4be728"}, - {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21792f4d0fcc5040978ee211c033e915d8b6608ea8a5b33fe197a04f0d43e991"}, - {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a37f0ec88e220326803084208d80229218b309d728954ab747ab21cca33424aa"}, - {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9622d9560a6fd8d589816cdcec6946642cb4e070b3f68be1d3779b52cf240f73"}, - {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:434044eec7f9df08fc5ca5c9bdd1a4bb08663679d43ebe7b9849713956f4d85f"}, - {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12eac2294d48ee27d1eaef7e214acedb394e4c95e3a1f6e4467099b82d58ef73"}, - {file = "pyzmq-23.0.0-cp37-cp37m-win32.whl", hash = "sha256:07d2008e51718fba60641e5d1a0646b222b7929f16f6e7cf0834b8439f42c9e8"}, - {file = "pyzmq-23.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b8528aefceb787f41ad429f3210a3c6b52e99f85413416e3d0c9e6d035f8ac"}, - {file = "pyzmq-23.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3f3807e81bf51d4c63eb12a21920614e0e840645418e9f2e3b5ffdd5991b3415"}, - {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:011a45c846ec69a3671ed15893b74b6ad608800c89ac6d0f0411e2137c6b313d"}, - {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:b97dc1273f16f85a38cff6668a07b636ef14e30591039efbfd21f5f91efae964"}, - {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8951830d6a00636b3af478091f9668ecc486f1dad01b975527957fd1d8c31bfd"}, - {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5619f6598d6fd30778053ae2daa48a7c54029816648b908270b751411fd52e74"}, - {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a89b9860d2171bcf674648dc8186db9cf3b773ad3c0610a2c7bf189cf3560b6"}, - {file = "pyzmq-23.0.0-cp38-cp38-win32.whl", hash = "sha256:0258563bf69f6ca305204354f171e0627a9bf8fe78c9d4f63a5e2447035cbb4b"}, - {file = "pyzmq-23.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:9feb7ccd426ff2158ce79f4c87a8a1600ed4f77e65e2fffda2b42638b2bc73e4"}, - {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:e9631c6a339843e4f95efb80ff9a1bfaaf3d611ba9677a7a5cc61ffb923b4e06"}, - {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34b143751e9b2b89cf9b656081f1b2842a563c4c9ffc8465531875daf546e772"}, - {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f227150148e7c3db7ecd8a58500439979f556e15455841a30b6d121755b14bc"}, - {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277b3ebc684b369a57a186a9acf629c1b01247eb04d1105536ef2dae5f61168a"}, - {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e2093a97bf3f6008a4be6b5bae8ae3fc409f18373593bef19dd7b381ab8030c"}, - {file = "pyzmq-23.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6c09e6e5c4baf0959287943dc8170624d739ae555d334e896a94d9de01c7bb21"}, - {file = "pyzmq-23.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c234aefeef034c5d6de452e2af5173a95ea06315b685db703091e6f937a6e60"}, - {file = "pyzmq-23.0.0-cp39-cp39-win32.whl", hash = "sha256:7b518ad9cdbaaeb1a9da3444797698871ae2eeae34ff9a656d5150d37e1e42a1"}, - {file = "pyzmq-23.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:011f26841dd56ed87e464c98023dbbd4c0b3ab8802a045de3ea83e0187eb8145"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a89285fedbeca483a855a77285453e21e4fc86ef0944bc018ef4b3033aa04ad2"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5a13171268f05d127e31b4c369b753733f67dbb0d765901ef625a115feb5c7de"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd3f563b98e2a8730c93bdc550f119ae766b2d3da1f0d6a3c7735b59adfa1642"}, - {file = "pyzmq-23.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e730d490b1421e52b43b1b9f5e1f8c3973499206e188f29b582577531e11033b"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0de8a7e13ffacfe33c89acc0d7bfa2f5bde94e3f74b7f1e4d43c97ce17864d77"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a64b9cce166396df5f33894074d6515778d48c63aae5ee1abd86d8bbc5a711d8"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e464e7b1be2216eba54b47256c15bf307ae4a656aa0f73becea7b3e7283c5ac2"}, - {file = "pyzmq-23.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3fa7126d532effee452c0ab395ab3cbef1c06fd6870ab7e681f812ba9e685cfa"}, - {file = "pyzmq-23.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9273f6d1da1018822f41630fb0f3fe208e8e70e5d5e780795326900cfa22d8b6"}, - {file = 
"pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca7d77f24644298cbe53bc279eb7ca05f3b8637473d392f0c9f34b37f08b49a"}, - {file = "pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f40604437ec8010f77f7053fd135ccb202d6ca18329903831087cab8dbdab1"}, - {file = "pyzmq-23.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4d861ae20040afc17adef33053c328667da78d4d3676b2936788fd031665e3a8"}, - {file = "pyzmq-23.0.0.tar.gz", hash = "sha256:a45f5c0477d12df05ef2e2922b49b7c0ae9d0f4ff9b6bb0d666558df0ef37122"}, -] -qtconsole = [ - {file = "qtconsole-5.3.0-py3-none-any.whl", hash = "sha256:75f2ded876444454edcb5a53262149e33b53db3a4a53116b7c3df52830905b0f"}, - {file = "qtconsole-5.3.0.tar.gz", hash = "sha256:8e3520fdc75e46abc4cc6cffeca16fa2652754109b8ae839fa28e27d1eba5625"}, -] -qtpy = [ - {file = "QtPy-2.1.0-py3-none-any.whl", hash = "sha256:aee0586081f943029312becece9f63977b0a9e3788f77a6ac8cc74802bb173d6"}, - {file = "QtPy-2.1.0.tar.gz", hash = "sha256:ca8cd4217175186344299ee4c0f7e7adcf362c70852ba35b255a534077025c06"}, -] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -s3transfer = [ - {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, - {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, -] -sammy = [ - {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, - {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, -] -send2trash = [ - {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, - {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] -stack-data = [ - {file = "stack_data-0.2.0-py3-none-any.whl", hash = "sha256:999762f9c3132308789affa03e9271bbbe947bf78311851f4d485d8402ed858e"}, - {file = "stack_data-0.2.0.tar.gz", hash = "sha256:45692d41bd633a9503a5195552df22b583caf16f0b27c4e58c98d88c8b648e12"}, -] -stripe = [ - {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, - {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, -] -swaggyp = [ - {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, - {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, -] -terminado = [ - {file = 
"terminado-0.15.0-py3-none-any.whl", hash = "sha256:0d5f126fbfdb5887b25ae7d9d07b0d716b1cc0ccaacc71c1f3c14d228e065197"}, - {file = "terminado-0.15.0.tar.gz", hash = "sha256:ab4eeedccfcc1e6134bfee86106af90852c69d602884ea3a1e8ca6d4486e9bfe"}, -] -tinycss2 = [ - {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, - {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, -] -tornado = [ - {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, - {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, - {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, - {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, - {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, - {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, - {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, - {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, - {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, - {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, - {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, - {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, - {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, - {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, - {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, - {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, - {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, - {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, -] -traitlets = [ - {file = "traitlets-5.2.1.post0-py3-none-any.whl", hash = "sha256:f44b708d33d98b0addb40c29d148a761f44af740603a8fd0e2f8b5b27cf0f087"}, - {file = "traitlets-5.2.1.post0.tar.gz", hash = "sha256:70815ecb20ec619d1af28910ade523383be13754283aef90528eb3d47b77c5db"}, -] -urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, -] -valley = [ - {file = "valley-1.5.6-py3-none-any.whl", hash = "sha256:fa2e5fc51d59901e5eb178116a4fb15b712928b4c87809f59cdf02a934d63cf6"}, - {file = "valley-1.5.6.tar.gz", hash = "sha256:ec55f7df3512f0dfa23c9f253b414a02491dea41a62230ed459a43cf02fee9a3"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = 
"sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] -werkzeug = [ - {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, - {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, -] -widgetsnbextension = [ - {file = "widgetsnbextension-3.6.0-py2.py3-none-any.whl", hash = "sha256:4fd321cad39fdcf8a8e248a657202d42917ada8e8ed5dd3f60f073e0d54ceabd"}, - {file = "widgetsnbextension-3.6.0.tar.gz", hash = "sha256:e84a7a9fcb9baf3d57106e184a7389a8f8eb935bf741a5eb9d60aa18cc029a80"}, -] -zipp = [ - {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, - {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, -] +anyio = [] +appnope = [] +argon2-cffi = [] +argon2-cffi-bindings = [] +arrow = [] +asttokens = [] +astunparse = [] +attrs = [] +backcall = [] +beautifulsoup4 = [] +bleach = [] +boto3 = [] +botocore = [] +cachetools = [] +certifi = [] +cffi = [] +charset-normalizer = [] +click = [] +colorama = [] +comm = [] +coverage = [] +cryptography = [] +debugpy = [] +decorator = [] +defusedxml = [] +envs = [] +executing = [] +fastjsonschema = [] +faunadb = [] +formy = [] +fqdn = [] +future = [] +graphql-py = [] +h11 = [] +h2 = [] +hpack = [] +httpcore = [] +httpx = [] +hyperframe = [] +idna = [] +importlib-metadata = [] +importlib-resources = [] +ipykernel = [] +ipython = [] +ipython-genutils = [] +ipywidgets = [] +iso8601 = [] +isoduration = [] +jedi = [] +jinja2 = [] +jmespath = [] +jsonpointer = [] +jsonschema = [] +jupyter = [] +jupyter-client = [] +jupyter-console = [] +jupyter-core = [] +jupyter-events = [] +jupyter-server = [] +jupyter-server-terminals = [] +jupyterlab-pygments = [] +jupyterlab-widgets = [] +markupsafe = [] +matplotlib-inline = [] +mistune = [] +nbclassic = [] +nbclient = [] +nbconvert = [] +nbformat = [] +nest-asyncio = [] +notebook = [] +notebook-shim = [] +openapi-schema-validator = [] +openapi-spec-validator = [] +packaging = [] +pandocfilters = [] +parso = [] +pdoc = [] +pexpect = [] +pickleshare = [] +pkgutil-resolve-name = [] +platformdirs = [] +ply = [] +prometheus-client = [] +prompt-toolkit = [] +psutil = [] +ptyprocess = [] +pure-eval = [] +pycparser = [] +pygments = [] +pyjwt = [] +pyrsistent = [] +python-dateutil = [] +python-json-logger = [] +pytz = [] +pywin32 = [] +pywinpty = [] +pyyaml = [] +pyzmq = [] +qtconsole = [] +qtpy = [] +requests = [] +rfc3339-validator = [] +rfc3986 = [] +rfc3986-validator = [] +s3transfer = [] +sammy = [] +send2trash = [] +six = [] +sniffio = [] +soupsieve = [] +stack-data = [] +stripe = [] +swaggyp = [] +terminado = [] +tinycss2 = [] +tornado = [] +traitlets = [] +uri-template = [] +urllib3 = [] +valley = [] +wcwidth = [] +webcolors = [] +webencodings = [] +websocket-client = [] +werkzeug = [] +widgetsnbextension = [] +zipp = [] diff --git a/pyproject.toml b/pyproject.toml index 6cea052..65903c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,9 +12,8 @@ valley = 
"1.5.8" requests = "^2.23.0" pytz = "^2021.1" decorator = "^5.0.9" -Werkzeug = "^2.0.1" pyjwt = "^2.1.0" -pip = "^21.2.4" +pip = "^23.0.1" cryptography = "^3.4.7" cachetools = "^4.2.2" click = "^8.0.1" @@ -25,6 +24,9 @@ stripe = "^2.61.0" bleach = "^4.1.0" openapi-spec-validator = "^0.4.0" swaggyp = "^0.3.0" +formy = "1.3.1" +Jinja2 = "^3.1.2" +Werkzeug = "2.1.2" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" @@ -32,7 +34,7 @@ coverage = "^5.5" pdoc = "^7.2.0" [build-system] -requires = ["poetry>=0.12"] +requires = ["setuptools", "poetry>=0.12"] [tool.poetry.scripts] pfunk = 'pfunk.cli:pfunk' From 61d411e9ff89741809e0f9ed74e43f7c17021d0e Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 6 Mar 2023 14:17:32 +0800 Subject: [PATCH 203/214] added documentation for auth json views --- pfunk/contrib/auth/views.py | 86 ++++++++++++++++++++++++++++++++++++- pfunk/utils/swagger.py | 3 +- 2 files changed, 87 insertions(+), 2 deletions(-) diff --git a/pfunk/contrib/auth/views.py b/pfunk/contrib/auth/views.py index 01b6b86..495443b 100644 --- a/pfunk/contrib/auth/views.py +++ b/pfunk/contrib/auth/views.py @@ -38,6 +38,25 @@ def get_query(self): 'exp': exp } + def _payload_docs(self): + return {"data": [ + { + "name": "username", + "in": "formData", + "description": "Username of the user", + "required": True, + "type": "string" + }, + { + "name": "password", + "in": "formData", + "description": "Password of the user", + "required": True, + "type":"string", + "format": "password" + } + ]} + class LogoutView(ActionMixin, JSONAuthView): """ Creates a logout view to enable logout via endpoint @@ -64,6 +83,24 @@ class SignUpView(ActionMixin, JSONAuthView): def get_query(self): return self.collection.signup(**self.get_query_kwargs()) + def _payload_docs(self): + return {"data": [ + { + "name": "username", + "in": "formData", + "description": "username of the user", + "required": True, + "type": "string" + }, + { + "name": "password", + "in": "formData", + "description": "password of the user", + "required": True, + "type":"string", + "format": "password" + } + ]} class VerifyEmailView(ActionMixin, JSONAuthView): """ Creates a view that enables verification of a user @@ -97,6 +134,33 @@ def get_query(self): self.collection.update_password(kwargs['current_password'], kwargs['new_password'], kwargs['new_password_confirm'], _token=self.request.token) + def _payload_docs(self): + return {"data": [ + { + "name": "current_password", + "in": "formData", + "description": "current password of the user", + "required": True, + "type": "string", + "format": "password" + }, + { + "name": "new_password", + "in": "formData", + "description": "new password of the user", + "required": True, + "type":"string", + "format": "password" + }, + { + "name": "new_password_confirm", + "in": "formData", + "description": "confirm the new password of the user by entering the same string", + "required": True, + "type":"string", + "format": "password" + } + ]} class ForgotPasswordView(ActionMixin, JSONAuthView): """ Create a view to allow call of forgot password func """ @@ -106,6 +170,17 @@ class ForgotPasswordView(ActionMixin, JSONAuthView): def get_query(self): return self.collection.forgot_password(**self.get_query_kwargs()) + + def _payload_docs(self): + return {"data": [ + { + "name": "email", + "in": "formData", + "description": "email of the user", + "required": True, + "type": "string" + } + ]} class ForgotPasswordChangeView(ActionMixin, JSONAuthView): @@ -124,7 +199,16 @@ def get_query(self): 
verify_type='forgot', password=kwargs['password']) - + def _payload_docs(self): + return {"data": [ + { + "name": "verification_key", + "in": "formData", + "description": "hashed key for verification of forgot password event", + "required": True, + "type": "string" + } + ]} class WebhookView(JSONView): pass diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py index 9e13625..a0f108c 100644 --- a/pfunk/utils/swagger.py +++ b/pfunk/utils/swagger.py @@ -237,7 +237,8 @@ def get_operations(self, col: Collection): _in=field.get('in'), description=field.get('description'), required=field.get('required'), - allowEmptyValue=False + allowEmptyValue=False, + _format=field.get('format') ) params.append(param) From 30aea6cac68d8d76f522a5bd5d9e402d58a49dcd Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 13 Mar 2023 16:28:19 +0800 Subject: [PATCH 204/214] Fixed creation of jwt to cast to int the payload for iat. Updated packages --- pfunk/contrib/auth/key.py | 4 +- poetry.lock | 1300 +++++++++++++++++++++++++++++++------ pyproject.toml | 2 +- 3 files changed, 1094 insertions(+), 212 deletions(-) diff --git a/pfunk/contrib/auth/key.py b/pfunk/contrib/auth/key.py index efbe123..bea8951 100644 --- a/pfunk/contrib/auth/key.py +++ b/pfunk/contrib/auth/key.py @@ -55,7 +55,7 @@ def create_jwt(cls, secret_claims): now = datetime.datetime.now(tz=gmt) exp = now + datetime.timedelta(days=1) payload = { - 'iat': now.timestamp(), + 'iat': int(now.timestamp()), 'exp': exp.timestamp(), 'nbf': now.timestamp(), 'iss': env('PROJECT_NAME', 'pfunk'), @@ -69,7 +69,7 @@ def decrypt_jwt(cls, encoded): keys = cls.import_keys() key = keys.get(headers.get('kid')) try: - decoded = jwt.decode(encoded, key.get('signature_key'), algorithms="HS256", verify=True, + decoded = jwt.decode(encoded, key.get('signature_key'), algorithms=["HS256"], verify=True, options={"require": ["iat", "exp", "nbf", 'iss', 'til']}) except ExpiredSignatureError: raise Unauthorized('Unauthorized') diff --git a/poetry.lock b/poetry.lock index 6e82eca..855d0b1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,8 +11,8 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] trio = ["trio (>=0.16,<0.22)"] [[package]] @@ -35,8 +35,8 @@ python-versions = ">=3.6" argon2-cffi-bindings = "*" [package.extras] -dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"] -docs = ["sphinx", "sphinx-notfound-page", "furo"] +dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] +docs = ["furo", "sphinx", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] @@ -51,7 +51,7 @@ python-versions = ">=3.6" cffi = ">=1.0.1" [package.extras] -dev = ["pytest", "cogapp", "pre-commit", "wheel"] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] [[package]] @@ -89,6 +89,7 @@ python-versions = "*" [package.dependencies] six = ">=1.6.1,<2.0" +wheel = 
">=0.23.0,<1.0" [[package]] name = "attrs" @@ -99,12 +100,12 @@ optional = false python-versions = ">=3.6" [package.extras] -cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs"] -docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["attrs", "zope.interface"] -tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] -tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests_no_zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "backcall" @@ -144,14 +145,14 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.26.81" +version = "1.26.89" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.29.81,<1.30.0" +botocore = ">=1.29.89,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -160,7 +161,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.81" +version = "1.29.89" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -203,11 +204,11 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7.0" [[package]] name = "click" @@ -265,12 +266,12 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] +docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "debugpy" @@ -305,7 +306,7 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] +cli = ["Jinja2[cli] (>=3.0.3,<4.0.0)", "click[cli] (>=8.0.3,<9.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] [[package]] name = "executing" @@ -316,7 +317,7 @@ optional = false python-versions = "*" [package.extras] -tests = ["asttokens", "pytest", "littleutils", "rich"] +tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "fastjsonschema" @@ -327,7 +328,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "faunadb" @@ -345,7 +346,7 @@ requests = "*" [package.extras] lint = ["pylint"] -test = ["nose2", "nose2"] +test = ["nose2", "nose2[coverage_plugin]"] [[package]] name = "formy" @@ -481,9 +482,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -497,12 +498,12 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [[package]] name = "ipykernel" -version = "6.21.2" +version = "6.21.3" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -524,11 +525,11 @@ tornado = ">=6.1" traitlets = ">=5.4.0" [package.extras] -cov = ["coverage", "curio", "matplotlib", "pytest-cov", "trio"] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest (>=7.0)"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -553,9 +554,9 @@ stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.21)", "pandas", "trio"] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -563,10 +564,10 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "trio"] +test_extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] [[package]] -name = "ipython-genutils" +name = "ipython_genutils" version = "0.2.0" description = "Vestigial utilities from IPython" category = "dev" @@ -622,12 +623,12 @@ python-versions = ">=3.6" parso = ">=0.8.0,<0.9.0" [package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", 
"sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] -name = "jinja2" +name = "Jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "main" @@ -715,12 +716,12 @@ tornado = ">=6.2" traitlets = ">=5.3" [package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinx (>=4)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-console" -version = "6.6.2" +version = "6.6.3" description = "Jupyter terminal console" category = "dev" optional = false @@ -775,11 +776,11 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "coverage", "pre-commit", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "pytest (>=7.0)", "rich"] +test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] [[package]] name = "jupyter-server" -version = "2.3.0" +version = "2.4.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false @@ -806,8 +807,8 @@ traitlets = ">=5.6.0" websocket-client = "*" [package.extras] -docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["ipykernel", "pre-commit", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "pytest (>=7.0)", "requests"] +docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-terminals" @@ -823,7 +824,7 @@ terminado = ">=0.8.3" [package.extras] docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout", "pytest (>=7.0)"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] [[package]] name = "jupyterlab-pygments" @@ -842,7 +843,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "markupsafe" +name = "MarkupSafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" @@ -870,7 +871,7 @@ python-versions = "*" [[package]] name = "nbclassic" -version = "0.5.2" +version = "0.5.3" description = "Jupyter Notebook as a Jupyter Server extension." 
category = "dev" optional = false @@ -896,9 +897,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "pytest-playwright", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "requests-unixsocket"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] [[package]] name = "nbclient" @@ -916,8 +917,8 @@ traitlets = ">=5.3" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient", "sphinx-book-theme", "sphinx (>=1.7)"] -test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "pytest (>=7.0)", "testpath", "xmltodict"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" @@ -946,9 +947,9 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["nbconvert"] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["nbconvert"] +qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] @@ -982,7 +983,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.5.2" +version = "6.5.3" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -1007,9 +1008,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "requests-unixsocket"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] [[package]] name = "notebook-shim" @@ -1037,9 +1038,9 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.0.0,<5.0.0" [package.extras] +isodate = ["isodate"] rfc3339-validator = ["rfc3339-validator"] strict-rfc3339 = ["strict-rfc3339"] -isodate = ["isodate"] [[package]] name = "openapi-spec-validator" @@ -1053,6 +1054,7 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.2.0,<5.0.0" openapi-schema-validator = ">=0.2.0,<0.3.0" PyYAML = ">=5.1" +setuptools = "*" [package.extras] requests = ["requests"] @@ -1122,7 +1124,15 @@ optional = false python-versions = "*" [[package]] -name = "pkgutil-resolve-name" +name = "pip" +version = "23.0.1" +description = "The PyPA recommended tool for installing Python packages." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pkgutil_resolve_name" version = "1.3.10" description = "Resolve a name to an object." 
category = "main" @@ -1131,15 +1141,15 @@ python-versions = ">=3.6" [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.1.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.1)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "ply" @@ -1180,7 +1190,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "ptyprocess" @@ -1210,7 +1220,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -name = "pygments" +name = "Pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" @@ -1221,7 +1231,7 @@ python-versions = ">=3.6" plugins = ["importlib-metadata"] [[package]] -name = "pyjwt" +name = "PyJWT" version = "2.6.0" description = "JSON Web Token implementation in Python" category = "main" @@ -1230,9 +1240,9 @@ python-versions = ">=3.7" [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pyrsistent" @@ -1286,7 +1296,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "pyyaml" +name = "PyYAML" version = "6.0" description = "YAML parser and emitter for Python" category = "main" @@ -1306,7 +1316,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.4.0" +version = "5.4.1" description = "Jupyter Qt console" category = "dev" optional = false @@ -1317,6 +1327,7 @@ ipykernel = ">=4.1" ipython-genutils = "*" jupyter-client = ">=4.1" jupyter-core = "*" +packaging = "*" pygments = "*" pyzmq = ">=17.1" qtpy = ">=2.0.1" @@ -1327,7 +1338,7 @@ doc = ["Sphinx (>=1.3)"] test = ["flaky", "pytest", "pytest-qt"] [[package]] -name = "qtpy" +name = "QtPy" version = "2.3.0" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" @@ -1419,7 +1430,7 @@ PyYAML = ">=3.12" valley = ">=1.5.2" [[package]] -name = "send2trash" +name = "Send2Trash" version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." 
category = "dev" @@ -1427,10 +1438,25 @@ optional = false python-versions = "*" [package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] win32 = ["pywin32"] objc = ["pyobjc-framework-cocoa"] nativelib = ["pywin32", "pyobjc-framework-cocoa"] +[[package]] +name = "setuptools" +version = "67.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -1469,7 +1495,7 @@ executing = ">=1.2.0" pure-eval = "*" [package.extras] -tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "stripe" @@ -1509,7 +1535,7 @@ tornado = ">=6.1.0" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest-timeout", "pytest (>=7.0)"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] [[package]] name = "tinycss2" @@ -1523,8 +1549,8 @@ python-versions = ">=3.7" webencodings = ">=0.4" [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -test = ["pytest", "isort", "flake8"] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] [[package]] name = "tornado" @@ -1555,19 +1581,19 @@ optional = false python-versions = ">=3.6" [package.extras] -dev = ["mypy", "flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "pep8-naming"] +dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1619,8 +1645,8 @@ optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] -name = "werkzeug" -version = "2.2.2" +name = "Werkzeug" +version = "2.1.2" description = "The comprehensive WSGI web application library." category = "main" optional = false @@ -1632,6 +1658,17 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog"] +[[package]] +name = "wheel" +version = "0.38.4" +description = "A built-package format for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=3.0.0)"] + [[package]] name = "widgetsnbextension" version = "4.0.5" @@ -1649,140 +1686,985 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "1eed3385a9e7bfaf61e1c764485f5ebfff46418df4af9b6ee8dd0bb12b429c37" +content-hash = "2da6450ab510552fae213960c22acc6456126a4642acf9fd9bc77062959c14f7" [metadata.files] -anyio = [] -appnope = [] -argon2-cffi = [] -argon2-cffi-bindings = [] -arrow = [] -asttokens = [] -astunparse = [] -attrs = [] -backcall = [] -beautifulsoup4 = [] -bleach = [] -boto3 = [] -botocore = [] -cachetools = [] -certifi = [] -cffi = [] -charset-normalizer = [] -click = [] -colorama = [] -comm = [] -coverage = [] -cryptography = [] -debugpy = [] -decorator = [] -defusedxml = [] -envs = [] -executing = [] -fastjsonschema = [] -faunadb = [] -formy = [] -fqdn = [] -future = [] -graphql-py = [] -h11 = [] -h2 = [] -hpack = [] -httpcore = [] -httpx = [] -hyperframe = [] -idna = [] -importlib-metadata = [] -importlib-resources = [] -ipykernel = [] -ipython = [] -ipython-genutils = [] -ipywidgets = [] -iso8601 = [] -isoduration = [] -jedi = [] -jinja2 = [] -jmespath = [] -jsonpointer = [] -jsonschema = [] -jupyter = [] -jupyter-client = [] -jupyter-console = [] -jupyter-core = [] -jupyter-events = [] -jupyter-server = [] -jupyter-server-terminals = [] -jupyterlab-pygments = [] -jupyterlab-widgets = [] -markupsafe = [] -matplotlib-inline = [] -mistune = [] -nbclassic = [] -nbclient = [] -nbconvert = [] -nbformat = [] -nest-asyncio = [] -notebook = [] -notebook-shim = [] -openapi-schema-validator = [] -openapi-spec-validator = [] -packaging = [] 
-pandocfilters = [] -parso = [] -pdoc = [] -pexpect = [] -pickleshare = [] -pkgutil-resolve-name = [] -platformdirs = [] -ply = [] -prometheus-client = [] -prompt-toolkit = [] -psutil = [] -ptyprocess = [] -pure-eval = [] -pycparser = [] -pygments = [] -pyjwt = [] -pyrsistent = [] -python-dateutil = [] -python-json-logger = [] -pytz = [] -pywin32 = [] -pywinpty = [] -pyyaml = [] -pyzmq = [] -qtconsole = [] -qtpy = [] -requests = [] -rfc3339-validator = [] -rfc3986 = [] -rfc3986-validator = [] -s3transfer = [] -sammy = [] -send2trash = [] -six = [] -sniffio = [] -soupsieve = [] -stack-data = [] -stripe = [] -swaggyp = [] -terminado = [] -tinycss2 = [] -tornado = [] -traitlets = [] -uri-template = [] -urllib3 = [] -valley = [] -wcwidth = [] -webcolors = [] -webencodings = [] -websocket-client = [] -werkzeug = [] -widgetsnbextension = [] -zipp = [] +anyio = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] +argon2-cffi = [ + {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] +argon2-cffi-bindings = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash 
= "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] +arrow = [ + {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, + {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, +] +asttokens = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] +astunparse = [ + {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, +] +attrs = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, + {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, +] +bleach = [ + {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, + {file = "bleach-4.1.0.tar.gz", hash = 
"sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, +] +boto3 = [ + {file = "boto3-1.26.89-py3-none-any.whl", hash = "sha256:09929b24aaec4951e435d53d31f800e2ca52244af049dc11e5385ce062e106e9"}, + {file = "boto3-1.26.89.tar.gz", hash = "sha256:e819812f16fab46fadf9b2853a46aaa126e108e7f038502dde555ebbbfc80133"}, +] +botocore = [ + {file = "botocore-1.29.89-py3-none-any.whl", hash = "sha256:b757e59feca82ac62934f658918133116b4535cf66f1d72ff4935fa24e522527"}, + {file = "botocore-1.29.89.tar.gz", hash = "sha256:ac8da651f73a9d5759cf5d80beba68deda407e56aaaeb10d249fd557459f3b56"}, +] +cachetools = [ + {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, + {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, +] +certifi = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = 
"cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] +charset-normalizer = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +comm = [ + {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, + {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, +] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] +cryptography = [ + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = 
"cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, +] +debugpy = [ + {file = "debugpy-1.6.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0ea1011e94416e90fb3598cc3ef5e08b0a4dd6ce6b9b33ccd436c1dffc8cd664"}, + {file = "debugpy-1.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dff595686178b0e75580c24d316aa45a8f4d56e2418063865c114eef651a982e"}, + {file = "debugpy-1.6.6-cp310-cp310-win32.whl", hash = "sha256:87755e173fcf2ec45f584bb9d61aa7686bb665d861b81faa366d59808bbd3494"}, + {file = "debugpy-1.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:72687b62a54d9d9e3fb85e7a37ea67f0e803aaa31be700e61d2f3742a5683917"}, + {file = "debugpy-1.6.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:78739f77c58048ec006e2b3eb2e0cd5a06d5f48c915e2fc7911a337354508110"}, + {file = "debugpy-1.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:23c29e40e39ad7d869d408ded414f6d46d82f8a93b5857ac3ac1e915893139ca"}, + {file = "debugpy-1.6.6-cp37-cp37m-win32.whl", hash = "sha256:7aa7e103610e5867d19a7d069e02e72eb2b3045b124d051cfd1538f1d8832d1b"}, + {file = "debugpy-1.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:f6383c29e796203a0bba74a250615ad262c4279d398e89d895a69d3069498305"}, + {file = "debugpy-1.6.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:23363e6d2a04d726bbc1400bd4e9898d54419b36b2cdf7020e3e215e1dcd0f8e"}, + {file = "debugpy-1.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b5d1b13d7c7bf5d7cf700e33c0b8ddb7baf030fcf502f76fc061ddd9405d16c"}, + {file = "debugpy-1.6.6-cp38-cp38-win32.whl", hash = "sha256:70ab53918fd907a3ade01909b3ed783287ede362c80c75f41e79596d5ccacd32"}, + {file = "debugpy-1.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:c05349890804d846eca32ce0623ab66c06f8800db881af7a876dc073ac1c2225"}, + {file = "debugpy-1.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771739902b1ae22a120dbbb6bd91b2cae6696c0e318b5007c5348519a4211c6"}, + {file = "debugpy-1.6.6-cp39-cp39-win32.whl", hash = "sha256:549ae0cb2d34fc09d1675f9b01942499751d174381b6082279cf19cdb3c47cbe"}, + {file = "debugpy-1.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:de4a045fbf388e120bb6ec66501458d3134f4729faed26ff95de52a754abddb1"}, + {file = "debugpy-1.6.6-py2.py3-none-any.whl", hash = "sha256:be596b44448aac14eb3614248c91586e2bc1728e020e82ef3197189aae556115"}, + {file = "debugpy-1.6.6.zip", hash = "sha256:b9c2130e1c632540fbf9c2c88341493797ddf58016e7cba02e311de9b0a96b67"}, +] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] +envs = [ + {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, + {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, +] +executing = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] +fastjsonschema = [ + {file = "fastjsonschema-2.16.3-py3-none-any.whl", hash = "sha256:04fbecc94300436f628517b05741b7ea009506ce8f946d40996567c669318490"}, + {file = "fastjsonschema-2.16.3.tar.gz", hash = "sha256:4a30d6315a68c253cfa8f963b9697246315aa3db89f98b97235e345dedfb0b8e"}, +] +faunadb = [ + {file = "faunadb-4.5.0-py2.py3-none-any.whl", hash = "sha256:5845911a3c16bc405145e16a247b1bcf67b4113822962cbfc40e1d1c6b5ac745"}, +] +formy = [ + {file = "formy-1.3.1-py3-none-any.whl", hash = "sha256:07c2a7ee351039694fe5b958ad4dfec34baeb0ffbddbf4af231609a75994e6f6"}, + {file = "formy-1.3.1.tar.gz", hash = "sha256:4ce7f79185c88f2fd896984a17e3d5cd23360db5408c7e726f64609371c0035d"}, +] +fqdn = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = 
"sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] +future = [ + {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, +] +graphql-py = [ + {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"}, +] +h11 = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] +h2 = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] +hpack = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] +httpcore = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] +httpx = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] +hyperframe = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] +idna = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] +importlib-metadata = [ + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, +] +importlib-resources = [ + {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, + {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, +] +ipykernel = [ + {file = "ipykernel-6.21.3-py3-none-any.whl", hash = "sha256:24ebd9715e317c185e37156ab3a87382410185230dde7aeffce389d6c7d4428a"}, + {file = "ipykernel-6.21.3.tar.gz", hash = "sha256:c8ff581905d70e7299bc1473a2f7c113bec1744fb3746d58e5b4b93bd8ee7001"}, +] +ipython = [ + {file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"}, + {file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"}, +] +ipython_genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] 
+ipywidgets = [ + {file = "ipywidgets-8.0.4-py3-none-any.whl", hash = "sha256:ebb195e743b16c3947fe8827190fb87b4d00979c0fbf685afe4d2c4927059fa1"}, + {file = "ipywidgets-8.0.4.tar.gz", hash = "sha256:c0005a77a47d77889cafed892b58e33b4a2a96712154404c6548ec22272811ea"}, +] +iso8601 = [ + {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, + {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, +] +isoduration = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] +jedi = [ + {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, + {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, +] +Jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] +jsonpointer = [ + {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, + {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, +] +jsonschema = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] +jupyter = [ + {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, + {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, + {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, +] +jupyter-client = [ + {file = "jupyter_client-8.0.3-py3-none-any.whl", hash = "sha256:be48ac6bd659cbbddb7a674cf06b3b8afbf53f228253cf58bde604c03bd487b0"}, + {file = "jupyter_client-8.0.3.tar.gz", hash = "sha256:ed65498bea6d876ef9d8da3e0db3dd33c5d129f5b2645f56ae03993782966bd0"}, +] +jupyter-console = [ + {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, + {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, +] +jupyter-core = [ + {file = "jupyter_core-5.2.0-py3-none-any.whl", hash = "sha256:4bdc2928c37f6917130c667d8b8708f20aee539d8283c6be72aabd2a4b4c83b0"}, + {file = "jupyter_core-5.2.0.tar.gz", hash = "sha256:1407cdb4c79ee467696c04b76633fc1884015fa109323365a6372c8e890cc83f"}, +] +jupyter-events = [ + {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, + {file = "jupyter_events-0.6.3.tar.gz", hash = 
"sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, +] +jupyter-server = [ + {file = "jupyter_server-2.4.0-py3-none-any.whl", hash = "sha256:cc22792281bfb0131a728414f28ae74883b44ad6d009971aa975cae9bcc650de"}, + {file = "jupyter_server-2.4.0.tar.gz", hash = "sha256:f31f0ba2c3c44f07143bfa03fb07dd0253f857eb63f0c26f2fea955f04a49765"}, +] +jupyter-server-terminals = [ + {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, + {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, +] +jupyterlab-pygments = [ + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, +] +jupyterlab-widgets = [ + {file = "jupyterlab_widgets-3.0.5-py3-none-any.whl", hash = "sha256:a04a42e50231b355b7087e16a818f541e53589f7647144ea0344c4bf16f300e5"}, + {file = "jupyterlab_widgets-3.0.5.tar.gz", hash = "sha256:eeaecdeaf6c03afc960ddae201ced88d5979b4ca9c3891bcb8f6631af705f5ef"}, +] +MarkupSafe = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = 
"MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] +mistune = [ + {file = "mistune-2.0.5-py2.py3-none-any.whl", hash = "sha256:bad7f5d431886fcbaf5f758118ecff70d31f75231b34024a1341120340a65ce8"}, + {file = "mistune-2.0.5.tar.gz", hash = "sha256:0246113cb2492db875c6be56974a7c893333bf26cd92891c85f63151cee09d34"}, +] +nbclassic = [ + {file = "nbclassic-0.5.3-py3-none-any.whl", hash = "sha256:e849277872d9ffd8fe4b39a8038d01ba82d6a1def9ce11b1b3c26c9546ed5131"}, + {file = "nbclassic-0.5.3.tar.gz", hash = "sha256:889772a7ba524eb781d2901f396540bcad41151e1f7e043f12ebc14a6540d342"}, +] +nbclient = [ + {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, + {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, +] +nbconvert = [ + {file = "nbconvert-7.2.9-py3-none-any.whl", hash = "sha256:495638c5e06005f4a5ce828d8a81d28e34f95c20f4384d5d7a22254b443836e7"}, + {file = "nbconvert-7.2.9.tar.gz", hash = "sha256:a42c3ac137c64f70cbe4d763111bf358641ea53b37a01a5c202ed86374af5234"}, +] +nbformat = [ + {file = "nbformat-5.7.3-py3-none-any.whl", hash = "sha256:22a98a6516ca216002b0a34591af5bcb8072ca6c63910baffc901cfa07fefbf0"}, + {file = "nbformat-5.7.3.tar.gz", hash = 
"sha256:4b021fca24d3a747bf4e626694033d792d594705829e5e35b14ee3369f9f6477"}, +] +nest-asyncio = [ + {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, + {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, +] +notebook = [ + {file = "notebook-6.5.3-py3-none-any.whl", hash = "sha256:50a334ad9d60b30cb759405168ef6fc3d60350ab5439fb1631544bb09dcb2cce"}, + {file = "notebook-6.5.3.tar.gz", hash = "sha256:b12bee3292211d85dd7e588a790ddce30cb3e8fbcfa1e803522a207f60819e05"}, +] +notebook-shim = [ + {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, + {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, +] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] +packaging = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] +pandocfilters = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] +pdoc = [ + {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +pip = [ + {file = "pip-23.0.1-py3-none-any.whl", hash = "sha256:236bcb61156d76c4b8a05821b988c7b8c35bf0da28a4b614e8d6ab5212c25c6f"}, + {file = "pip-23.0.1.tar.gz", hash = "sha256:cd015ea1bfb0fcef59d8a286c1f8bebcb983f6317719d415dc5351efb7cd7024"}, +] +pkgutil_resolve_name = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] 
+platformdirs = [ + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, +] +ply = [ + {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, + {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, +] +prometheus-client = [ + {file = "prometheus_client-0.16.0-py3-none-any.whl", hash = "sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab"}, + {file = "prometheus_client-0.16.0.tar.gz", hash = "sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, +] +psutil = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = 
"sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +Pygments = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, +] +PyJWT = [ + {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, + {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, +] +pyrsistent = [ + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-json-logger = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] +pytz = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] +pywin32 = [ + {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, + {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, + {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, + {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, + {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, + {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, + {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, + 
{file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, + {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, + {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, + {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, + {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, + {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, + {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, +] +pywinpty = [ + {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, + {file = "pywinpty-2.0.10-cp311-none-win_amd64.whl", hash = "sha256:7ffbd66310b83e42028fc9df7746118978d94fba8c1ebf15a7c1275fdd80b28a"}, + {file = "pywinpty-2.0.10-cp37-none-win_amd64.whl", hash = "sha256:38cb924f2778b5751ef91a75febd114776b3af0ae411bc667be45dd84fc881d3"}, + {file = "pywinpty-2.0.10-cp38-none-win_amd64.whl", hash = "sha256:902d79444b29ad1833b8d5c3c9aabdfd428f4f068504430df18074007c8c0de8"}, + {file = "pywinpty-2.0.10-cp39-none-win_amd64.whl", hash = "sha256:3c46aef80dd50979aff93de199e4a00a8ee033ba7a03cadf0a91fed45f0c39d7"}, + {file = "pywinpty-2.0.10.tar.gz", hash = "sha256:cdbb5694cf8c7242c2ecfaca35c545d31fa5d5814c3d67a4e628f803f680ebea"}, +] +PyYAML = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = 
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +pyzmq = [ + {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2d05d904f03ddf1e0d83d97341354dfe52244a619b5a1440a5f47a5b3451e84e"}, + {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:0a154ef810d44f9d28868be04641f837374a64e7449df98d9208e76c260c7ef1"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487305c2a011fdcf3db1f24e8814bb76d23bc4d2f46e145bc80316a59a9aa07d"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e7b87638ee30ab13230e37ce5331b3e730b1e0dda30120b9eeec3540ed292c8"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75243e422e85a62f0ab7953dc315452a56b2c6a7e7d1a3c3109ac3cc57ed6b47"}, + {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:31e523d067ce44a04e876bed3ff9ea1ff8d1b6636d16e5fcace9d22f8c564369"}, + {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8539216173135e9e89f6b1cc392e74e6b935b91e8c76106cf50e7a02ab02efe5"}, + {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2754fa68da08a854f4816e05160137fa938a2347276471103d31e04bcee5365c"}, + {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1bc30f0c18444d51e9b0d0dd39e3a4e7c53ee74190bebef238cd58de577ea9"}, + {file = "pyzmq-25.0.0-cp310-cp310-win32.whl", hash = "sha256:01d53958c787cfea34091fcb8ef36003dbb7913b8e9f8f62a0715234ebc98b70"}, + {file = "pyzmq-25.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:58fc3ad5e1cfd2e6d24741fbb1e216b388115d31b0ca6670f894187f280b6ba6"}, + {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4bba04ea779a3d7ef25a821bb63fd0939142c88e7813e5bd9c6265a20c523a2"}, + {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af1fbfb7ad6ac0009ccee33c90a1d303431c7fb594335eb97760988727a37577"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85456f0d8f3268eecd63dede3b99d5bd8d3b306310c37d4c15141111d22baeaf"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0645b5a2d2a06fd8eb738018490c514907f7488bf9359c6ee9d92f62e844b76f"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f72ea279b2941a5203e935a4588b9ba8a48aeb9a926d9dfa1986278bd362cb8"}, + {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:4e295f7928a31ae0f657e848c5045ba6d693fe8921205f408ca3804b1b236968"}, + {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ac97e7d647d5519bcef48dd8d3d331f72975afa5c4496c95f6e854686f45e2d9"}, + {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:656281d496aaf9ca4fd4cea84e6d893e3361057c4707bd38618f7e811759103c"}, + {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f6116991568aac48b94d6d8aaed6157d407942ea385335a6ed313692777fb9d"}, + {file = "pyzmq-25.0.0-cp311-cp311-win32.whl", hash = "sha256:0282bba9aee6e0346aa27d6c69b5f7df72b5a964c91958fc9e0c62dcae5fdcdc"}, + {file = "pyzmq-25.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:526f884a27e8bba62fe1f4e07c62be2cfe492b6d432a8fdc4210397f8cf15331"}, + {file = "pyzmq-25.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ccb3e1a863222afdbda42b7ca8ac8569959593d7abd44f5a709177d6fa27d266"}, + {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4046d03100aca266e70d54a35694cb35d6654cfbef633e848b3c4a8d64b9d187"}, + {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3100dddcada66ec5940ed6391ebf9d003cc3ede3d320748b2737553019f58230"}, + {file = 
"pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7877264aa851c19404b1bb9dbe6eed21ea0c13698be1eda3784aab3036d1c861"}, + {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5049e75cc99db65754a3da5f079230fb8889230cf09462ec972d884d1704a3ed"}, + {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:81f99fb1224d36eb91557afec8cdc2264e856f3464500b55749020ce4c848ef2"}, + {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a1cd4a95f176cdc0ee0a82d49d5830f13ae6015d89decbf834c273bc33eeb3d3"}, + {file = "pyzmq-25.0.0-cp36-cp36m-win32.whl", hash = "sha256:926236ca003aec70574754f39703528947211a406f5c6c8b3e50eca04a9e87fc"}, + {file = "pyzmq-25.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:94f0a7289d0f5c80807c37ebb404205e7deb737e8763eb176f4770839ee2a287"}, + {file = "pyzmq-25.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f3f96d452e9580cb961ece2e5a788e64abaecb1232a80e61deffb28e105ff84a"}, + {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:930e6ad4f2eaac31a3d0c2130619d25db754b267487ebc186c6ad18af2a74018"}, + {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1081d7030a1229c8ff90120346fb7599b54f552e98fcea5170544e7c6725aab"}, + {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:531866c491aee5a1e967c286cfa470dffac1e2a203b1afda52d62b58782651e9"}, + {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fc7c1421c5b1c916acf3128bf3cc7ea7f5018b58c69a6866d70c14190e600ce9"}, + {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9a2d5e419bd39a1edb6cdd326d831f0120ddb9b1ff397e7d73541bf393294973"}, + {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:183e18742be3621acf8908903f689ec520aee3f08449bfd29f583010ca33022b"}, + {file = "pyzmq-25.0.0-cp37-cp37m-win32.whl", hash = "sha256:02f5cb60a7da1edd5591a15efa654ffe2303297a41e1b40c3c8942f8f11fc17c"}, + {file = "pyzmq-25.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cac602e02341eaaf4edfd3e29bd3fdef672e61d4e6dfe5c1d065172aee00acee"}, + {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:e14df47c1265356715d3d66e90282a645ebc077b70b3806cf47efcb7d1d630cb"}, + {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:293a7c2128690f496057f1f1eb6074f8746058d13588389981089ec45d8fdc77"}, + {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:731b208bc9412deeb553c9519dca47136b5a01ca66667cafd8733211941b17e4"}, + {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b055a1cddf8035966ad13aa51edae5dc8f1bba0b5d5e06f7a843d8b83dc9b66b"}, + {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17e1cb97d573ea84d7cd97188b42ca6f611ab3ee600f6a75041294ede58e3d20"}, + {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:60ecbfe7669d3808ffa8a7dd1487d6eb8a4015b07235e3b723d4b2a2d4de7203"}, + {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c25c95416133942280faaf068d0fddfd642b927fb28aaf4ab201a738e597c1e"}, + {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be05504af0619d1cffa500af1e0ede69fb683f301003851f5993b5247cc2c576"}, + {file = "pyzmq-25.0.0-cp38-cp38-win32.whl", hash = "sha256:6bf3842af37af43fa953e96074ebbb5315f6a297198f805d019d788a1021dbc8"}, + {file = 
"pyzmq-25.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b90bb8dfbbd138558f1f284fecfe328f7653616ff9a972433a00711d9475d1a9"}, + {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:62b9e80890c0d2408eb42d5d7e1fc62a5ce71be3288684788f74cf3e59ffd6e2"}, + {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484c2c4ee02c1edc07039f42130bd16e804b1fe81c4f428e0042e03967f40c20"}, + {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9ca6db34b26c4d3e9b0728841ec9aa39484eee272caa97972ec8c8e231b20c7e"}, + {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:610d2d112acd4e5501fac31010064a6c6efd716ceb968e443cae0059eb7b86de"}, + {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3594c0ff604e685d7e907860b61d0e10e46c74a9ffca168f6e9e50ea934ee440"}, + {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c21a5f4e54a807df5afdef52b6d24ec1580153a6bcf0607f70a6e1d9fa74c5c3"}, + {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4725412e27612f0d7d7c2f794d89807ad0227c2fc01dd6146b39ada49c748ef9"}, + {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d3d604fe0a67afd1aff906e54da557a5203368a99dcc50a70eef374f1d2abef"}, + {file = "pyzmq-25.0.0-cp39-cp39-win32.whl", hash = "sha256:3670e8c5644768f214a3b598fe46378a4a6f096d5fb82a67dfd3440028460565"}, + {file = "pyzmq-25.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e99629a976809fe102ef73e856cf4b2660acd82a412a51e80ba2215e523dfd0a"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:66509c48f7446b640eeae24b60c9c1461799a27b1b0754e438582e36b5af3315"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c464cc508177c09a5a6122b67f978f20e2954a21362bf095a0da4647e3e908"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:28bcb2e66224a7ac2843eb632e4109d6b161479e7a2baf24e37210461485b4f1"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e7ef9ac807db50b4eb6f534c5dcc22f998f5dae920cc28873d2c1d080a4fc9"}, + {file = "pyzmq-25.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5050f5c50b58a6e38ccaf9263a356f74ef1040f5ca4030225d1cb1a858c5b7b6"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a73af6504e0d2805e926abf136ebf536735a13c22f709be7113c2ec65b4bec3"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0e8d00228db627ddd1b418c7afd81820b38575f237128c9650365f2dd6ac3443"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5605621f2181f20b71f13f698944deb26a0a71af4aaf435b34dd90146092d530"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6136bfb0e5a9cf8c60c6ac763eb21f82940a77e6758ea53516c8c7074f4ff948"}, + {file = "pyzmq-25.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0a90b2480a26aef7c13cff18703ba8d68e181facb40f78873df79e6d42c1facc"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00c94fd4c9dd3c95aace0c629a7fa713627a5c80c1819326b642adf6c4b8e2a2"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20638121b0bdc80777ce0ec8c1f14f1ffec0697a1f88f0b564fa4a23078791c4"}, + {file = 
"pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f75b4b8574f3a8a0d6b4b52606fc75b82cb4391471be48ab0b8677c82f9ed4"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cbb885f347eba7ab7681c450dee5b14aed9f153eec224ec0c3f299273d9241f"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c48f257da280b3be6c94e05bd575eddb1373419dbb1a72c3ce64e88f29d1cd6d"}, + {file = "pyzmq-25.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:866eabf7c1315ef2e93e34230db7cbf672e0d7c626b37c11f7e870c8612c3dcc"}, + {file = "pyzmq-25.0.0.tar.gz", hash = "sha256:f330a1a2c7f89fd4b0aa4dcb7bf50243bf1c8da9a2f1efc31daf57a2046b31f2"}, +] +qtconsole = [ + {file = "qtconsole-5.4.1-py3-none-any.whl", hash = "sha256:bae8c7e10170cdcdcaf7e6d53ad7d6a7412249b9b8310a0eaa6b6f3b260f32db"}, + {file = "qtconsole-5.4.1.tar.gz", hash = "sha256:f67a03f40f722e13261791280f73068dbaf9dafcc335cbba644ccc8f892640e5"}, +] +QtPy = [ + {file = "QtPy-2.3.0-py3-none-any.whl", hash = "sha256:8d6d544fc20facd27360ea189592e6135c614785f0dec0b4f083289de6beb408"}, + {file = "QtPy-2.3.0.tar.gz", hash = "sha256:0603c9c83ccc035a4717a12908bf6bc6cb22509827ea2ec0e94c2da7c9ed57c5"}, +] +requests = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] +rfc3339-validator = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +rfc3986-validator = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] +s3transfer = [ + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, +] +sammy = [ + {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, + {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, +] +Send2Trash = [ + {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, + {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, +] +setuptools = [ + {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] +soupsieve = [ + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, +] +stack-data = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] +stripe = [ + {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, + {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, +] +swaggyp = [ + {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, + {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, +] +terminado = [ + {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, + {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, +] +tinycss2 = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] +tornado = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = 
"sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, +] +traitlets = [ + {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, + {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, +] +uri-template = [ + {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, + {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, +] +urllib3 = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] +valley = [ + {file = "valley-1.5.8-py3-none-any.whl", hash = "sha256:c30c0bdb30e5be561dd4332281fc53315c4c34f174d268d8cc7496a6f47ee314"}, + {file = "valley-1.5.8.tar.gz", hash = "sha256:88342fa4af854b8e9e426776995c9c2c690b432ea35c0c9529fa0abb62e553e3"}, +] +wcwidth = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] +webcolors = [ + {file = "webcolors-1.12-py3-none-any.whl", hash = "sha256:d98743d81d498a2d3eaf165196e65481f0d2ea85281463d856b1e51b09f62dce"}, + {file = "webcolors-1.12.tar.gz", hash = "sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9"}, +] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] +websocket-client = [ + {file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"}, + {file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"}, +] +Werkzeug = [ + {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, + {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, +] +wheel = [ + {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, + {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, +] +widgetsnbextension = [ + {file = "widgetsnbextension-4.0.5-py3-none-any.whl", hash = "sha256:eaaaf434fb9b08bd197b2a14ffe45ddb5ac3897593d43c69287091e5f3147bf7"}, + {file = "widgetsnbextension-4.0.5.tar.gz", hash = "sha256:003f716d930d385be3fd9de42dd9bf008e30053f73bddde235d14fbeaeff19af"}, +] +zipp = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] diff --git a/pyproject.toml b/pyproject.toml index 65903c8..3dfb385 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,6 @@ 
valley = "1.5.8" requests = "^2.23.0" pytz = "^2021.1" decorator = "^5.0.9" -pyjwt = "^2.1.0" pip = "^23.0.1" cryptography = "^3.4.7" cachetools = "^4.2.2" @@ -27,6 +26,7 @@ swaggyp = "^0.3.0" formy = "1.3.1" Jinja2 = "^3.1.2" Werkzeug = "2.1.2" +PyJWT = "^2.6.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0" From 9f9f364c758a633aeec017a929395dff25a3d05b Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 14 Mar 2023 15:05:02 +0800 Subject: [PATCH 205/214] Added essential docstrings and little refactors for performance and documentation --- pfunk/collection.py | 14 +- pfunk/contrib/auth/collections.py | 152 +++++++++-- pfunk/contrib/auth/resources.py | 258 +++++++++++------- pfunk/tests/test_aws.py | 3 +- .../test_web_custom_user_group_group_perms.py | 16 +- pfunk/tests/test_web_custom_user_group_m2m.py | 23 +- .../test_web_custom_user_group_users_perms.py | 16 +- 7 files changed, 326 insertions(+), 156 deletions(-) diff --git a/pfunk/collection.py b/pfunk/collection.py index 956229e..ea20f21 100644 --- a/pfunk/collection.py +++ b/pfunk/collection.py @@ -140,10 +140,9 @@ def get_user_field(self) -> str: fields = self._base_properties.items() user_class = self.user_collection or env('USER_COLLECTION', 'User') user_field = None - for k, v in fields: - if user_class in v.get_graphql_type(): - user_field = k - break + user_fields = [k for k, v in fields if user_class in v.get_graphql_type()] + if user_fields: + user_field = user_fields[0] return user_field def get_group_field(self) -> str: @@ -156,10 +155,9 @@ def get_group_field(self) -> str: fields = self._base_properties.items() group_class = self.group_collection or env('GROUP_COLLECTION', 'Group') group_field = None - for k, v in fields: - if group_class in v.get_graphql_type(): - group_field = k - break + group_fields = [k for k, v in fields if group_class in v.get_graphql_type()] + if group_fields: + group_field = group_fields[0] return group_field def get_collection_name(self) -> str: diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections.py index fec6f3a..5e39ed8 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections.py @@ -28,10 +28,16 @@ class BaseGroup(Collection): slug = SlugField(unique=True, required=False) def __unicode__(self): + """Return the name of the group + + Returns: + str: Name of the group + """ return self.name # pragma: no cover class UserGroupByUserAndGroupIndex(Index): + """Lookup index for UserGroup M2M collection""" name = 'usergroups_by_userID_and_groupID' source = 'Usergroups' terms = [ @@ -49,6 +55,11 @@ class BaseUserGroup(Collection): permissions = ListField() def __unicode__(self): + """Return the userID, groupID, and permissions + + Returns: + str: userID, groupID, and permissions + """ return f"{self.userID}, {self.groupID}, {self.permissions}" @@ -58,7 +69,7 @@ class UserGroups(BaseUserGroup): The native fauna-way of holding many-to-many relationship is to only have the ID of the 2 object. Here in pfunk, we leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities + field, which is `permissions`, this field holds the capabilities of a user, allowing us to add easier permission handling. Instead of manually going to roles and adding individual collections which can be painful in long term. 
@@ -77,6 +88,15 @@ class UserGroups(BaseUserGroup): env('USER_COLLECTION_DIR', 'pfunk.contrib.auth.collections.User')) groupID = ReferenceField( env('GROUP_COLLECTION_DIR', 'pfunk.contrib.auth.collections.Group')) + permissions = ListField() + + def __unicode__(self): + """Return the userID, groupID, and permissions + + Returns: + str: userID, groupID, and permissions + """ + return f"{self.userID}, {self.groupID}, {self.permissions}" AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) @@ -120,14 +140,20 @@ class BaseUser(Collection): AccountStatus, required=True, default_value="INACTIVE") def __unicode__(self): + """Returns the username of the user""" return self.username # pragma: no cover @classmethod def login(cls, username, password, _token=None): """ Logs the user in to Fauna + Args: + username (str, required): Username of the user + password (str, required): Password of the user + _token (str, optional): Token of the user + Returns: - token: the token from fauna + token (str, required): the token from fauna """ c = cls() try: @@ -141,17 +167,35 @@ def login(cls, username, password, _token=None): @classmethod def logout(cls, _token=None): - """ Expires/invalidates the user's login token """ + """ Expires/invalidates the user's login token + + Args: + _token (str, optional): Token of the user + + Returns: + None + """ c = cls() return c.client(_token=_token).query( q.call("logout_user") ) def permissions(self, _token=None): + """Returns an empty array""" return [] @classmethod def api_login(cls, username, password, _token=None): + """ Logs the user in to Fauna and creates a JWT + + Args: + username (str, required): Username of the user + password (str, required): Password of the user + _token (str, optional): Token of the user + + Returns: + token (str, required): the token from fauna + """ token = cls.login(username=username, password=password, _token=_token) user = cls.get_current_user(_token=token) claims = user.to_dict().copy() @@ -166,7 +210,14 @@ def api_login(cls, username, password, _token=None): @classmethod def get_from_id(cls, _token=None): - """ Acquire user from the given Id """ + """ Acquire user from the given Id + + Args: + _token (str, optional): Token of the user + + Returns: + user (BaseUser, required): The user object + """ c = cls() ref = c.client(_token=_token).query( q.current_identity() @@ -176,10 +227,18 @@ def get_from_id(cls, _token=None): def attach_verification_key(self): """ Attaches the verification key to user to enable one-time activate + + Returns: + None """ self.verification_key = str(uuid.uuid4()) def attach_forgot_verification_key(self): + """ Attaches forgot password key to user + + Returns: + None + """ self.forgot_password_key = str(uuid.uuid4()) self.save() @@ -187,11 +246,18 @@ def attach_forgot_verification_key(self): def verify_email(cls, verification_key, verify_type='signup', password=None): """ Activate the user from the verification key - Args: - verification_key (str, required): - verification key in the email to compare the one - attached to the user - """ + Args: + verification_key (str, required): + verification key in the email to compare the one + attached to the user + verify_type (str, optional): + Type of verification being performed. Default: 'signup' + password (str, optional): + Password of the user. 
Required if verify_type is 'forgot' + + Returns: + None + """ if verify_type == 'signup': user = cls.get_by('unique_User_verification_key', [verification_key]) @@ -205,7 +271,18 @@ def verify_email(cls, verification_key, verify_type='signup', password=None): user.save(_credentials=password) def send_verification_email(self, from_email=None, verification_type='signup'): - """ Send the verification email with the hashed key """ + """ Send the verification email with the hashed key + + Args: + from_email (str, optional): + From email address of the verification email. + Default: env('DEFAULT_FROM_EMAIL') + verification_type (str, optional): + Type of verification being performed. Default: 'signup' + + Returns: + None + """ project_name = env('PROJECT_NAME', '') if verification_type == 'signup': txt_template = 'auth/verification_email.txt' @@ -240,8 +317,11 @@ def forgot_password(cls, email): """ Sends forgot password email to let user use that link to reset their password """ + # get the user object user = cls.get_by('unique_User_email', email) + # attach the forgot verification key user.attach_forgot_verification_key() + # send the verification email user.send_verification_email(verification_type='forgot') @classmethod @@ -254,13 +334,15 @@ def signup(cls, _token=None, **kwargs): **kwargs (dict, required): The user's needed information for creation """ + # create a data dict with the user's needed information data = kwargs data['account_status'] = 'INACTIVE' + # pop the group key if it exists try: data.pop('groups') except KeyError: pass - + # create the user cls.create(**data, _token=_token) @classmethod @@ -287,10 +369,13 @@ def update_password(cls, current_password, new_password, new_password_confirm, _ If current_password is wrong, will return `Wrong current password.` """ + # raise an exception if new password and new password confirm don't match if new_password != new_password_confirm: raise ValidationException( 'new_password: Password field and password confirm field do not match.') + # create a collection instance c = cls() + # update the password using the user's current password and the new password try: return c.client(_token=_token).query( q.call("update_password", { @@ -310,23 +395,31 @@ def get_current_user(cls, _token=None): id (str): Fauna ID of the user in `User` collection """ + # create a collection instance c = cls() + # get the current identity return cls.get(c.client(_token=_token).query(q.current_identity()).id()) - def __unicode__(self): - return self.username # pragma: no cover - class ExtendedUser(BaseUser): """ User that has permission capabilities. Extension of `BaseUser`. - Subclass and define these properties - Provides base methods for group-user permissions. If there are no - supplied `groups` property, will raise `NotImplementedErrror` + Subclass and define the properties needed. 
""" - # user_group_class = import_util('pfunk.contrib.auth.collections.UserGroups') + user_group_class = None + group_class = None @classmethod def get_permissions(cls, ref, _token=None): + """Returns the permissions of the user + + Args: + ref (str): The user ID + _token (str): Fauna auth token + + Returns: + str[]: Permissions of the user in list: + `['create', 'read', 'delete', 'write']` + """ return cls.get(ref, _token).permissions(_token=_token) def get_groups(self, _token=None): @@ -340,10 +433,12 @@ def get_groups(self, _token=None): index_name = f'{user_class}s_{group_class}s_by_{user_class}' if relation_name: index_name = f'{relation_name}_by_{user_class}' - - return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( + # query Fauna to get the data + data = self.client(_token=_token).query( q.paginate(q.match(index_name, self.ref)) - ).get('data')] + ).get('data') + # create a list of group instances from the data + return [self.group_class.get(i.id(), _token=_token) for i in data] def permissions(self, _token=None): """ Returns the permissions of the user @@ -359,18 +454,24 @@ def permissions(self, _token=None): perm_list (str[]): Permissions of the user in list: `['create', 'read', 'delete', 'write']` """ - index_name = 'usergroups_by_userID_and_groupID' perm_list = [] + # loop over the groups of the user for i in self.get_groups(_token=_token): + # query Fauna to get the UserGroup instance of the user ug = self.user_group_class.get_index(index_name, [ self.ref, i.ref], _token=_token) + # loop over the UserGroup instances for user_group in ug: p = [] + # check if there are any permissions in the instance if isinstance(user_group.permissions, list): + # loop over the permissions p = [ f'{user_group.groupID.slug}-{i}' for i in user_group.permissions] + # add the permissions to the list perm_list.extend(p) + # return a list of the user's permissions return perm_list def add_permissions(self, group, permissions: list, _token=None): @@ -397,21 +498,28 @@ def add_permissions(self, group, permissions: list, _token=None): perm_list = [] index_name = 'usergroups_by_userID_and_groupID' + # loop over the permissions and add to the list for i in permissions: perm_list.extend(i.permissions) + # raise an error if the user_group_class is not defined if not self.user_group_class: raise NotImplementedError + # try to get the UserGroup instance try: user_group = self.user_group_class.get_by( index_name, terms=[self.ref, group.ref]) + # create a new instance if not found except DocNotFound: user_group = self.user_group_class.create( userID=self.ref, groupID=group.ref, permissions=perm_list, _token=_token) + # update the permissions if they're not the same if user_group.permissions != perm_list: user_group.permissions = perm_list + # save the changes user_group.save() + # return the UserGroup instance return user_group diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 36c2064..52ba448 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -181,25 +181,34 @@ def get_lambda(self, resource_type): ) + class GenericAuthorizationRole(Role): + """This class provides generic authorization roles for collections""" - def get_relation_index_name(self): - """ Returns the index name of the created permission index of group and user -> 'usergroups_by_userID_and_groupID' """ + def get_relation_index_name(self) -> str: + """ + Returns the index name of the created permission index of group and user -> 
'usergroups_by_userID_and_groupID' + """ return 'usergroups_by_userID_and_groupID' - def get_user_table(self): + def get_user_table(self) -> str: + """Returns the user table name""" return self.collection.user_collection or env('USER_COLLECTION', 'User') - def get_group_table(self): + def get_group_table(self) -> str: + """Returns the group table name""" return self.collection.group_collection or env('GROUP_COLLECTION', 'Group') - def get_name_suffix(self): + def get_name_suffix(self) -> str: + """Returns the name suffix for this role""" return f'{self.collection.get_user_field().lower()}_based_crud_role' - def get_name(self): + def get_name(self) -> str: + """Returns the name for this role""" return self.name or f"{self.collection.get_class_name()}_{self.get_name_suffix()}" - def get_privileges(self): + def get_privileges(self) -> list: + """Returns the list of privileges for this role""" priv_list = [ { "resource": q.collection(self.collection.get_collection_name()), @@ -241,16 +250,24 @@ def get_privileges(self): } for i in self.collection.collection_functions ]) + return priv_list class GenericUserBasedRole(GenericAuthorizationRole): - """ Generic set of permissions for entity to user relationship """ + """Class to provide a generic set of permissions based on the user-entity relationship. + + Args: + GenericAuthorizationRole (class): Inherited class + """ def get_relation_index_name(self): - """ Returns the user-group by user index name + """Returns the user-group by user index name + + Formatted as: {user_group_relation_name}_by_{user_class} - Formatted as: {user_group_relation_name}_by_{user_class} + Returns: + str: User-group by user index name """ # Acquires the `groups` field from the user collection user_field = self.collection.get_user_field() @@ -270,67 +287,86 @@ def get_relation_index_name(self): return None def get_lambda(self, resource_type): + """ Returns the lambda for the specified resource type + + Args: + resource_type (str): Type of resource + + Returns: + q.query: Lambda query + """ current_user_field = self.collection.get_user_field() if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] - user_ref = q.select(current_user_field, - q.select('data', q.var('old_object'))) + user_ref = q.select( + current_user_field, q.select('data', q.var('old_object'))) return q.query( - q.lambda_(lambda_args, - q.and_( - q.equals( - user_ref, - q.current_identity() - ), - q.equals( - q.select(current_user_field, q.select( - 'data', q.var('new_object'))), - q.current_identity() - ) - ) - - ) + q.lambda_( + lambda_args, + q.and_( + q.equals(user_ref, q.current_identity()), + q.equals( + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), + q.current_identity() + ) + ) + ) ) elif resource_type == 'create': lambda_args = ["new_object"] - user_ref = q.select(current_user_field, - q.select('data', q.var('new_object'))) + user_ref = q.select( + current_user_field, q.select('data', q.var('new_object'))) elif resource_type == 'read' or resource_type == 'delete': lambda_args = ["object_ref"] - user_ref = q.select(current_user_field, - q.select('data', q.get(q.var('object_ref')))) + user_ref = q.select( + current_user_field, q.select('data', q.get(q.var('object_ref')))) return q.query( - q.lambda_(lambda_args, - q.equals( - user_ref, - q.current_identity() - ) - ) + q.lambda_(lambda_args, q.equals(user_ref, q.current_identity())) ) - class GenericGroupBasedRole(GenericAuthorizationRole): + """Class for giving permissions to Group-based 
entities + """ + # Initialize the `permissions_field` variable permissions_field = 'permissions' def get_name_suffix(self): + """Get the name suffix for the group-based role + + Returns: + str: The name suffix for the group-based role + """ return f'{self.get_group_table().lower()}_based_crud_role' def get_lambda(self, resource_type): - """ Returns the lambda function for giving the permission to Group-based entities + """Returns the lambda function for giving the permission to Group-based entities - Allows modification if: - 1. You belong to the group that owns the document - 2. You have the create permission to perform the action (create, read, write, and delete) + Args: + resource_type (str): The type of operation (create, read, write, and delete) + + Returns: + Lambda: The lambda function for giving the permission to Group-based entities """ current_group_field = self.collection.get_group_field().lower() perm = f'{self.collection.get_collection_name()}-{resource_type}'.lower() + # Initialize the lambda arguments based on the `resource_type` if resource_type == 'write': group_ref = q.select(current_group_field, q.select('data', q.var('old_object'))) lambda_args = ["old_object", "new_object", "object_ref"] + elif resource_type == 'create': + lambda_args = ["new_object"] + group_ref = q.select(current_group_field, + q.select('data', q.var('new_object'))) + elif resource_type == 'read' or resource_type == 'delete': + lambda_args = ["object_ref"] + group_ref = q.select(current_group_field, + q.select('data', q.get(q.var('object_ref')))) + if resource_type == 'write': return q.query( q.lambda_(lambda_args, q.and_( @@ -355,49 +391,47 @@ def get_lambda(self, resource_type): q.select(current_group_field, q.select( 'data', q.var('new_object'))), ) - ) - ) + ) + ) ) - elif resource_type == 'create': - lambda_args = ["new_object"] - group_ref = q.select(current_group_field, - q.select('data', q.var('new_object'))) - elif resource_type == 'read' or resource_type == 'delete': - lambda_args = ["object_ref"] - group_ref = q.select(current_group_field, - q.select('data', q.get(q.var('object_ref')))) - - return q.query( - q.lambda_( - lambda_args, - q.equals( - # NOTE: After acquiring the instance of `UserGroup`, filter the result: permission field - # that matches the `perm` variable AND then see if that is equals to `perm` var - # IMPORTANT: by using this, it will easily filter permissions available, and if there were none, then it is automatically false - q.select(0, q.filter_(lambda i: q.equals(perm, i), - q.select(self.permissions_field, - q.select("data", - q.get(q.match( - q.index( - self.get_relation_index_name()), - q.current_identity(), - group_ref - )))))), - perm + else: + # Return the lambda function for giving the permission to Group-based entities + return q.query( + q.lambda_( + lambda_args, + q.equals( + # NOTE: After acquiring the instance of `UserGroup`, filter the result: permission field + # that matches the `perm` variable AND then see if that is equals to `perm` var + # IMPORTANT: by using this, it will easily filter permissions available, and if there were none, then it is automatically false + q.select(0, q.filter_(lambda i: q.equals(perm, i), + q.select(self.permissions_field, + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + q.current_identity(), + group_ref + )))))), + perm + ) ) ) - ) class GenericUserBasedRoleM2M(GenericAuthorizationRole): """ Generic set of permissions for many-to-many entity to user relationship """ def 
get_privileges(self): - """ Usage of parent `get_privileges()` with addition of access to M2M collection """ + """ + Usage of parent `get_privileges()` with addition of access to M2M collection + Returns: + List: list of privileges + """ priv_list = super().get_privileges() fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') - for k, v in fields.items(): - foreign_col = self.collection._base_properties.get(k) + for field, value in fields.items(): + # Get foreign column + foreign_col = self.collection._base_properties.get(field) relation_name = foreign_col.relation_name if relation_name: priv_list.extend([ @@ -414,10 +448,18 @@ def get_privileges(self): return priv_list def get_name_suffix(self): + """ + Returns: + String: suffix for name of the role + """ return f'{self.collection.get_user_field().lower()}_based_crud_role' - + def get_relation_index_name(self): - """ Returns the index name of the m2m index of an entity and user e.g. 'users_blogs_by_blog_and_newuser' """ + """ + Returns the index name of the m2m index of an entity and user e.g. 'users_blogs_by_blog_and_newuser' + Returns: + String: name of the index + """ user_field = self.collection.get_user_field() if user_field: user_field = user_field.lower() @@ -436,34 +478,40 @@ def get_relation_index_name(self): return relation_index_name return None - def get_lambda(self, resource_type): + """ + Returns lamda expression for the given resource type + Args: + resource_type (String): type of resource + Returns: + Lamda expression + """ current_user_field = self.collection.get_user_field() if resource_type == 'write': lambda_args = ["old_object", "new_object", "object_ref"] obj_ref = q.var('old_object') return q.query( q.lambda_(lambda_args, - q.and_( - q.equals( - q.select(f'{self.get_user_table().lower()}ID', - q.select("data", - q.get(q.match( - q.index( - self.get_relation_index_name()), - obj_ref, - q.current_identity() - ))) - ), - q.current_identity() - ), - q.equals( - q.select(current_user_field, q.select( - 'data', q.var('new_object'))), - q.current_identity() - ) - ) - ) + q.and_( + q.equals( + q.select(f'{self.get_user_table().lower()}ID', + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), + q.current_identity() + ), + q.equals( + q.select(current_user_field, q.select( + 'data', q.var('new_object'))), + q.current_identity() + ) + ) + ) ) elif resource_type == 'create': # Create ops will always be allowed @@ -477,15 +525,15 @@ def get_lambda(self, resource_type): lambda_args, q.equals( q.select(f'{self.get_user_table().lower()}ID', - q.select("data", - q.get(q.match( - q.index( - self.get_relation_index_name()), - obj_ref, - q.current_identity() - ))) - ), + q.select("data", + q.get(q.match( + q.index( + self.get_relation_index_name()), + obj_ref, + q.current_identity() + ))) + ), q.current_identity() ) ) - ) + ) \ No newline at end of file diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index d28c852..b3152c7 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -4,8 +4,9 @@ from unittest import mock from pfunk.utils.aws import ApiGateway -from pfunk.tests import User, Group, Person, Sport +from pfunk.tests import Person, Sport from pfunk.project import Project +from pfunk.contrib.auth.collections import Group, User, UserGroups class ApiGatewayTests(unittest.TestCase): diff --git a/pfunk/tests/test_web_custom_user_group_group_perms.py 
b/pfunk/tests/test_web_custom_user_group_group_perms.py index 38274e5..cf0f057 100644 --- a/pfunk/tests/test_web_custom_user_group_group_perms.py +++ b/pfunk/tests/test_web_custom_user_group_group_perms.py @@ -9,8 +9,10 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') + userID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_group_perms.Newuser') + groupID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') class Newgroup(BaseGroup): @@ -35,8 +37,10 @@ def __unicode__(self): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.UserGroups') - group_class = import_util('pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') + user_group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_group_perms.UserGroups') + group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_web_custom_user_group_group_perms.Newgroup', relation_name='custom_users_groups') @@ -89,8 +93,8 @@ def test_update(self): house.address for house in Blog.all()]) res = self.c.put(f'/json/blog/update/{self.blog.ref.id()}/', json={ - "title": "updated blog", - "content": "I updated my blog."}, + "title": "updated blog", + "content": "I updated my blog."}, headers={ "Authorization": self.token}) diff --git a/pfunk/tests/test_web_custom_user_group_m2m.py b/pfunk/tests/test_web_custom_user_group_m2m.py index 3f86e4a..8e04044 100644 --- a/pfunk/tests/test_web_custom_user_group_m2m.py +++ b/pfunk/tests/test_web_custom_user_group_m2m.py @@ -11,8 +11,10 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newuser') - groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') + userID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_m2m.Newuser') + groupID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup') class Newgroup(BaseGroup): @@ -22,8 +24,10 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.UserGroups') - group_class = import_util('pfunk.tests.test_web_custom_user_group_m2m.Newgroup') + user_group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_m2m.UserGroups') + group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_web_custom_user_group_m2m.Newgroup', relation_name='custom_users_groups') blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Blog', @@ -39,7 +43,7 @@ class Blog(Collection): title = StringField(required=True) content = StringField(required=True) users = ManyToManyField('pfunk.tests.test_web_custom_user_group_m2m.Newuser', - relation_name='users_blogs') + relation_name='users_blogs') def __unicode__(self): return self.title @@ -56,13 +60,12 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) self.user2 = Newuser.create(username='test2', email='tlasso2@example.org', first_name='Juliuz', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', - groups=[self.group]) + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + 
groups=[self.group]) self.blog = Blog.create( title='test_blog', content='test content', users=[self.user], token=self.secret) self.token, self.exp = Newuser.api_login("test", "abc123") - def test_read(self): res = self.c.get(f'/json/blog/detail/{self.blog.ref.id()}/', headers={ @@ -99,7 +102,7 @@ def test_update(self): "title": "updated blog", "content": "I updated my blog.", "users": [self.user.ref.id()] - }, + }, headers={ "Authorization": self.token}) @@ -115,3 +118,5 @@ def test_delete(self): }) self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) diff --git a/pfunk/tests/test_web_custom_user_group_users_perms.py b/pfunk/tests/test_web_custom_user_group_users_perms.py index a1b7b0c..7dcdcb4 100644 --- a/pfunk/tests/test_web_custom_user_group_users_perms.py +++ b/pfunk/tests/test_web_custom_user_group_users_perms.py @@ -4,7 +4,7 @@ from valley.utils import import_util from pprint import pprint as p -from pfunk.contrib.auth.collections import BaseGroup , ExtendedUser, BaseUserGroup as ug +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug from pfunk.testcase import APITestCase from pfunk import Collection, StringField, ReferenceField, ManyToManyField from pfunk.fields import ManyToManyField, StringField @@ -12,8 +12,10 @@ class UserGroups(ug): - userID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newuser') - groupID = ReferenceField('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') + userID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_users_perms.Newuser') + groupID = ReferenceField( + 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') class Newgroup(BaseGroup): @@ -23,8 +25,10 @@ class Newgroup(BaseGroup): class Newuser(ExtendedUser): group_collection = 'Newgroup' - user_group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.UserGroups') - group_class = import_util('pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') + user_group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_users_perms.UserGroups') + group_class = import_util( + 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup') groups = ManyToManyField( 'pfunk.tests.test_web_custom_user_group_users_perms.Newgroup', relation_name='custom_users_groups') blogs = ManyToManyField('pfunk.tests.test_web_custom_user_group_users_perms.Blog', @@ -111,3 +115,5 @@ def test_delete(self): }) self.assertTrue(res.status_code, 200) + self.assertNotIn("test_blog", [ + blog.title for blog in Blog.all()]) From fc122de2d8d221adac3f9e6fb3668ba1070a662c Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Fri, 17 Mar 2023 14:08:30 +0800 Subject: [PATCH 206/214] updated README to show examples of custom user and group collection --- README.md | 121 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 120 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b0634bf..7143deb 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,9 @@ Includes GraphQL and generic ABAC auth workflow integrations. - [Save Some Data](#save-some-data) - [Query Your Data](#query-your-data) - [Delete a Record](#delete-a-record) - +- [Customizing your setup](#customizing-your-setup) + - [Option 1: Environment Variables](#option-1-environment-variables) + - [Option 2: Inline Field](#option-2-inline-field) ### Getting Started @@ -213,3 +215,120 @@ Let's delete the record from above. 
```python product.delete() ``` + + + +### Customizing your setup +This section is for customizing your `user` and `group` collections to your liking. e.g. +- Renaming your `User` and `Group` collection to something more verbose to your usage e.g. `Agent (User)` to `Firm (Group)` +- Subclassing the `user` or `group` collection in order to have more control to what kind of auth collection you want to have + + +### Custom User and Groups +Initially, we have multiple ways of defining your custom user-group collections. +Things to keep in mind: +- `UserGroup` class **must** be subclassed and reference to the custom `user` and `group` +- How the permissions work will do the same thing in custom user and group. This just gives you the ability to fully customize your own auth collections. + + + +### Option 1: Environment Variables +This is the easiest way. Just go to your `.env` file and define: +``` +USER_COLLECTION=Newuser # Class name of your custom user class - case-sensitive! +GROUP_COLLECTION=Newgroup # Class name of your custom group class - case-sensitive! +GROUP_COLLECTION_DIR=dir.to.Newgroup # class dir to import your custom group +USER_COLLECTION_DIR=dir.to.Newuser # class dir to import your custom user group +``` +Then you'll end up with this in your `collections.py` +```python +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug +from pfunk.contrib.auth.resources import GenericUserBasedRole + + +class UserGroups(ug): + userID = ReferenceField('dir.to.Newuser') + groupID = ReferenceField('dir.to.Newgroup') + + +class Newgroup(BaseGroup): + users = ManyToManyField('dir.to.Newuser', relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('dir.to.UserGroups') + group_class = import_util('dir.to.Newgroup') + groups = ManyToManyField('dir.to.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('dir.to.Blog', relation_name='users_blogs') + + +class Blog(Collection): + collection_roles = [GenericUserBasedRole] + title = StringField(required=True) + content = StringField(required=True) + user = ReferenceField('dir.to.Newuser', relation_name='users_blogs') + + def __unicode__(self): + return self.title + +``` + + + +### Option 2: Inline Field +If for some reason you don't want to use the environment variables, you can define the needed fields +directly in your `Collection`. This is what we use in PFunk's unit tests, refer to it if you +need more usage but essentially: +```python +class Blog(Collection): + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'dir.to.Newuser' + group_collection_dir = 'dir.to.Newgroup' + ... +``` + + +Generally, this is how your `collections.py` will look like in the end if you want to define +your custom auth collections in fields. 
+ + +```python +from pfunk.contrib.auth.collections import BaseGroup, ExtendedUser, BaseUserGroup as ug +from pfunk.contrib.auth.resources import GenericUserBasedRole + + +class UserGroups(ug): + userID = ReferenceField('this.file.NewUser') + groupID = ReferenceField('this.file.Newgroup') + + +class Newgroup(BaseGroup): + users = ManyToManyField('this.file.Newuser', relation_name='custom_users_groups') + + +class Newuser(ExtendedUser): + user_group_class = import_util('this.file.UserGroups') + group_class = import_util('this.file.Newgroup') + groups = ManyToManyField('this.file.Newgroup', relation_name='custom_users_groups') + blogs = ManyToManyField('this.file.Blog', + relation_name='users_blogs') + + group_collection = 'Newgroup' + + +class Blog(Collection): + collection_roles = [GenericUserBasedRole] + title = StringField(required=True) + content = StringField(required=True) + user = ReferenceField('this.file.Newuser', relation_name='users_blogs') + + user_collection = 'Newuser' + group_collection = 'Newgroup' + user_collection_dir = 'this.file.Newuser' + group_collection_dir = 'this.file.Newgroup' + + def __unicode__(self): + return self.title +``` + From 0a0f80537155fc290cfa1f647375c0d3d5e1a281 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Tue, 28 Mar 2023 15:10:09 +0800 Subject: [PATCH 207/214] Refactored tests to work without pre-existing files --- pfunk/tests/test_aws.py | 22 ++++++++++++++++++++++ pfunk/tests/test_email.py | 21 ++++++++++++--------- pfunk/tests/test_project.py | 8 ++++---- pfunk/tests/test_web_json_stripe.py | 16 +++++++++------- 4 files changed, 47 insertions(+), 20 deletions(-) diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py index b3152c7..b181b78 100644 --- a/pfunk/tests/test_aws.py +++ b/pfunk/tests/test_aws.py @@ -1,4 +1,5 @@ import os +import json import unittest import tempfile from unittest import mock @@ -27,10 +28,31 @@ def setUpClass(cls, mocked) -> None: cls.aws_client = ApiGateway() cls.project.add_resources([Person, Sport, Group, User]) + with open(f'pfunk.json', 'x') as f: + json.dump({ + 'name': 'test', + 'api_type': 'rest', + 'description': 'test project', + 'host': 'localhost', + 'stages': {'dev': { + 'key_module': f'test.dev_keys.KEYS', + 'fauna_secret': 'test-key', + 'bucket': 'test-bucket', + 'default_from_email': 'test@example.org' + }} + }, f, indent=4, sort_keys=True) swagger = cls.project.generate_swagger() cls.swagger_dir = swagger['dir'] cls.swagger_file = swagger['swagger_file'] + @classmethod + def tearDownClass(cls) -> None: + os.remove("pfunk.json") + try: + os.remove('swagger.yaml') + except FileNotFoundError: + pass + def test_validate_yaml(self): result = self.aws_client.validate_yaml(self.swagger_dir) self.assertIsNone(result) # if there are no errors, then spec is valid diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index a841463..eb93ceb 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ -1,3 +1,4 @@ +import os import tempfile from unittest import mock @@ -22,7 +23,8 @@ def setUp(self) -> None: self.backend = EmailBackend() def test_get_template(self): - template = self.backend.get_template('email/email_template.html') + with tempfile.NamedTemporaryFile(suffix='.html') as tmp: + template = self.backend.get_template(tmp.name.split("/")[-1]) # test jinja render if no exceptions template.render(unittest_value="random value") self.assertTrue(True) # if there are no exceptions, then it is a pass @@ -57,14 +59,15 @@ def setUp(self) -> None: 
@mock.patch('boto3.client') def test_send_email(self, mocked): - res = self.SES.send_email( - subject="test", - to_emails=["testemail@email.com"], - html_template='email/email_template.html', - from_email="testFromEmail@email.com", - cc_emails=["testCCemail@email.com"], - bcc_emails=["testBCCemail@email.com"], - ) + with tempfile.NamedTemporaryFile(suffix='.html') as tmp: + res = self.SES.send_email( + subject="test", + to_emails=["testemail@email.com"], + html_template=tmp.name.split("/")[-1], + from_email="testFromEmail@email.com", + cc_emails=["testCCemail@email.com"], + bcc_emails=["testBCCemail@email.com"], + ) # if there are no exceptions, then it's a passing test self.assertTrue(True) diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index f7e97e0..2665808 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -29,10 +29,10 @@ def setUp(self) -> None: def tearDown(self) -> None: os.remove("pfunk.json") - # try: - # os.remove('swagger.yaml') - # except FileNotFoundError: - # pass + try: + os.remove('swagger.yaml') + except FileNotFoundError: + pass def test_add_resource(self): self.project.add_resource(Person) diff --git a/pfunk/tests/test_web_json_stripe.py b/pfunk/tests/test_web_json_stripe.py index be45dec..59b484b 100644 --- a/pfunk/tests/test_web_json_stripe.py +++ b/pfunk/tests/test_web_json_stripe.py @@ -1,3 +1,4 @@ +import tempfile from types import SimpleNamespace from unittest import mock @@ -230,13 +231,14 @@ def test_check_ip(self): @mock.patch('boto3.client') def test_send_html_email(self, mocked): - # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file - res = self.view.send_html_email( - subject='Test Subject', - from_email='unittesting@email.com', - to_email_list=['recipient@email.com'], - template_name=('email/email_template.html') - ) + with tempfile.NamedTemporaryFile(suffix='.html') as tmp: + # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file + res = self.view.send_html_email( + subject='Test Subject', + from_email='unittesting@email.com', + to_email_list=['recipient@email.com'], + template_name=tmp.name.split("/")[-1] + ) self.assertTrue(True) # if there are no exceptions, then it passed @mock.patch('stripe.Webhook') From f8017566262f4fb4de23af1da856343dd6176ee5 Mon Sep 17 00:00:00 2001 From: Juliuz Christian Llanillo Date: Mon, 3 Apr 2023 08:11:33 +0800 Subject: [PATCH 208/214] Fixed poetry errors --- Dockerfile | 1 + docker-compose.yaml | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4b7920a..7f67ec2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,5 @@ FROM capless/capless-docker:jupyter +RUN pip install --upgrade pip COPY . 
/code RUN poetry run pip install --upgrade pip RUN poetry install diff --git a/docker-compose.yaml b/docker-compose.yaml index daadf03..ff462a4 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -18,7 +18,7 @@ services: - ./:/code/ env_file: .env working_dir: /code/ - command: /root/.cache/pypoetry/virtualenvs/pfunk-MATOk_fk-py3.9/bin/jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root + command: jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root fauna: restart: always diff --git a/pyproject.toml b/pyproject.toml index 3dfb385..4c5e00d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ coverage = "^5.5" pdoc = "^7.2.0" [build-system] -requires = ["setuptools", "poetry>=0.12"] +requires = ["poetry>=0.12"] [tool.poetry.scripts] pfunk = 'pfunk.cli:pfunk' From 290432a55296c421548ccdaa4a167ccaa2c94b7c Mon Sep 17 00:00:00 2001 From: Brian Jinwright Date: Sun, 2 Apr 2023 21:41:27 -0400 Subject: [PATCH 209/214] updated the poetry dependency list --- poetry.lock | 1343 +++++++++------------------------------------------ 1 file changed, 240 insertions(+), 1103 deletions(-) diff --git a/poetry.lock b/poetry.lock index 855d0b1..317771a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,8 +11,8 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] trio = ["trio (>=0.16,<0.22)"] [[package]] @@ -35,8 +35,8 @@ python-versions = ">=3.6" argon2-cffi-bindings = "*" [package.extras] -dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] -docs = ["furo", "sphinx", "sphinx-notfound-page"] +dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"] +docs = ["sphinx", "sphinx-notfound-page", "furo"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] @@ -51,7 +51,7 @@ python-versions = ">=3.6" cffi = ">=1.0.1" [package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] +dev = ["pytest", "cogapp", "pre-commit", "wheel"] tests = ["pytest"] [[package]] @@ -89,7 +89,6 @@ python-versions = "*" [package.dependencies] six = ">=1.6.1,<2.0" -wheel = ">=0.23.0,<1.0" [[package]] name = "attrs" @@ -100,12 +99,12 @@ optional = false python-versions = ">=3.6" [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests_no_zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs"] +docs = ["furo", "sphinx", "myst-parser", "zope.interface", 
"sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["attrs", "zope.interface"] +tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] +tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] [[package]] name = "backcall" @@ -117,7 +116,7 @@ python-versions = "*" [[package]] name = "beautifulsoup4" -version = "4.11.2" +version = "4.12.0" description = "Screen-scraping library" category = "dev" optional = false @@ -145,14 +144,14 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.26.89" +version = "1.26.104" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.29.89,<1.30.0" +botocore = ">=1.29.104,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -161,7 +160,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.89" +version = "1.29.104" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -231,7 +230,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 [[package]] name = "comm" -version = "0.1.2" +version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." category = "dev" optional = false @@ -241,7 +240,9 @@ python-versions = ">=3.6" traitlets = ">=5.3" [package.extras] +lint = ["black (>=22.6.0)", "mdformat-gfm (>=0.3.5)", "mdformat (>0.7)", "ruff (>=0.0.156)"] test = ["pytest"] +typing = ["mypy (>=0.990)"] [[package]] name = "coverage" @@ -266,12 +267,12 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] [[package]] name = "debugpy" @@ -306,7 +307,7 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -cli = ["Jinja2[cli] (>=3.0.3,<4.0.0)", "click[cli] (>=8.0.3,<9.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] +cli = ["click[cli] (>=8.0.3,<9.0.0)", "Jinja2[cli] (>=3.0.3,<4.0.0)", "terminaltables[cli] (>=3.1.10,<4.0.0)"] [[package]] name = "executing" @@ -317,7 +318,7 @@ optional = false python-versions = "*" [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens", "pytest", "littleutils", "rich"] [[package]] name = "fastjsonschema" @@ -328,7 +329,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "faunadb" @@ -346,7 +347,7 @@ requests = "*" 
[package.extras] lint = ["pylint"] -test = ["nose2", "nose2[coverage_plugin]"] +test = ["nose2", "nose2"] [[package]] name = "formy" @@ -472,7 +473,7 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "6.0.0" +version = "6.1.0" description = "Read metadata from Python packages" category = "dev" optional = false @@ -482,9 +483,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" @@ -498,12 +499,12 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [[package]] name = "ipykernel" -version = "6.21.3" +version = "6.22.0" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -525,15 +526,15 @@ tornado = ">=6.1" traitlets = ">=5.4.0" [package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +cov = ["coverage", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "ipython" -version = "8.11.0" +version = "8.12.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -552,11 +553,12 @@ prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all 
= ["black", "ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.21)", "pandas", "trio"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["ipykernel", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "docrepr", "matplotlib", "stack-data", "pytest (<7)", "typing-extensions", "pytest (<7.1)", "pytest-asyncio", "testpath"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -564,10 +566,10 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "trio"] [[package]] -name = "ipython_genutils" +name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" category = "dev" @@ -576,7 +578,7 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "8.0.4" +version = "8.0.6" description = "Jupyter interactive widgets" category = "dev" optional = false @@ -585,12 +587,12 @@ python-versions = ">=3.7" [package.dependencies] ipykernel = ">=4.5.1" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0,<4.0" +jupyterlab-widgets = ">=3.0.7,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0,<5.0" +widgetsnbextension = ">=4.0.7,<4.1.0" [package.extras] -test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] +test = ["jsonschema", "ipykernel", "pytest (>=3.6.0)", "pytest-cov", "pytz"] [[package]] name = "iso8601" @@ -623,12 +625,12 @@ python-versions = ">=3.6" parso = ">=0.8.0,<0.9.0" [package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest 
(<7.0.0)"] [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "main" @@ -701,7 +703,7 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "8.0.3" +version = "8.1.0" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false @@ -716,7 +718,7 @@ tornado = ">=6.2" traitlets = ">=5.3" [package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinx (>=4)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] @@ -742,7 +744,7 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.2.0" +version = "5.3.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false @@ -750,7 +752,7 @@ python-versions = ">=3.8" [package.dependencies] platformdirs = ">=2.5" -pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = ">=5.3" [package.extras] @@ -776,11 +778,11 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] +test = ["click", "coverage", "pre-commit", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "pytest (>=7.0)", "rich"] [[package]] name = "jupyter-server" -version = "2.4.0" +version = "2.5.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false @@ -808,7 +810,7 @@ websocket-client = "*" [package.extras] docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +test = ["ipykernel", "pre-commit", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "pytest (>=7.0)", "requests"] [[package]] name = "jupyter-server-terminals" @@ -824,7 +826,7 @@ terminado = ">=0.8.3" [package.extras] docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] +test = ["coverage", "jupyter-server (>=2.0.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "jupyterlab-pygments" @@ -836,14 +838,14 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "3.0.5" +version = "3.0.7" description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false python-versions = ">=3.7" [[package]] -name = "MarkupSafe" +name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" @@ -871,7 +873,7 @@ python-versions = "*" [[package]] name = "nbclassic" -version = "0.5.3" +version = "0.5.4" description = "Jupyter Notebook as a Jupyter Server extension." 
category = "dev" optional = false @@ -897,9 +899,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "pytest-playwright", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "requests-unixsocket"] [[package]] name = "nbclient" @@ -917,12 +919,12 @@ traitlets = ">=5.3" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] -test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient", "sphinx-book-theme", "sphinx (>=1.7)"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "pytest (>=7.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" -version = "7.2.9" +version = "7.2.10" description = "Converting Jupyter Notebooks" category = "dev" optional = false @@ -947,9 +949,9 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +all = ["nbconvert"] docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["nbconvert[qtpng]"] +qtpdf = ["nbconvert"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] @@ -957,7 +959,7 @@ webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.7.3" +version = "5.8.0" description = "The Jupyter Notebook format" category = "dev" optional = false @@ -1008,9 +1010,9 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium (==4.1.5)", "pytest-cov", "requests-unixsocket"] [[package]] name = "notebook-shim" @@ -1038,9 +1040,9 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.0.0,<5.0.0" [package.extras] -isodate = ["isodate"] rfc3339-validator = ["rfc3339-validator"] strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] [[package]] name = "openapi-spec-validator" @@ -1054,7 +1056,6 @@ python-versions = ">=3.7.0,<4.0.0" jsonschema = ">=3.2.0,<5.0.0" openapi-schema-validator = ">=0.2.0,<0.3.0" PyYAML = ">=5.1" -setuptools = "*" [package.extras] requests = ["requests"] @@ -1124,15 +1125,7 @@ optional = false python-versions = "*" [[package]] -name = "pip" -version = "23.0.1" -description = "The PyPA recommended tool for installing Python packages." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pkgutil_resolve_name" +name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." category = "main" @@ -1141,15 +1134,15 @@ python-versions = ">=3.6" [[package]] name = "platformdirs" -version = "3.1.1" +version = "3.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.2)"] [[package]] name = "ply" @@ -1190,7 +1183,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] [[package]] name = "ptyprocess" @@ -1220,7 +1213,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -name = "Pygments" +name = "pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" @@ -1231,7 +1224,7 @@ python-versions = ">=3.6" plugins = ["importlib-metadata"] [[package]] -name = "PyJWT" +name = "pyjwt" version = "2.6.0" description = "JSON Web Token implementation in Python" category = "main" @@ -1240,9 +1233,9 @@ python-versions = ">=3.7" [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] name = "pyrsistent" @@ -1281,7 +1274,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "305" +version = "306" description = "Python for Window Extensions" category = "dev" optional = false @@ -1296,7 +1289,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "main" @@ -1305,7 +1298,7 @@ python-versions = ">=3.6" [[package]] name = "pyzmq" -version = "25.0.0" +version = "25.0.2" description = "Python bindings for 0MQ" category = "dev" optional = false @@ -1316,7 +1309,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.4.1" +version = "5.4.2" description = "Jupyter Qt console" category = "dev" optional = false @@ -1338,8 +1331,8 @@ doc = ["Sphinx (>=1.3)"] test = ["flaky", "pytest", "pytest-qt"] [[package]] -name = "QtPy" -version = "2.3.0" +name = "qtpy" +version = "2.3.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
category = "dev" optional = false @@ -1430,7 +1423,7 @@ PyYAML = ">=3.12" valley = ">=1.5.2" [[package]] -name = "Send2Trash" +name = "send2trash" version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." category = "dev" @@ -1438,25 +1431,12 @@ optional = false python-versions = "*" [package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] +nativelib = ["pyobjc-framework-cocoa", "pywin32"] +objc = ["pyobjc-framework-cocoa"] win32 = ["pywin32"] objc = ["pyobjc-framework-cocoa"] nativelib = ["pywin32", "pyobjc-framework-cocoa"] -[[package]] -name = "setuptools" -version = "67.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -1495,7 +1475,7 @@ executing = ">=1.2.0" pure-eval = "*" [package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] +tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] [[package]] name = "stripe" @@ -1535,7 +1515,7 @@ tornado = ">=6.1.0" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +test = ["pre-commit", "pytest-timeout", "pytest (>=7.0)"] [[package]] name = "tinycss2" @@ -1549,8 +1529,8 @@ python-versions = ">=3.7" webencodings = ">=0.4" [package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest", "isort", "flake8"] [[package]] name = "tornado" @@ -1572,6 +1552,14 @@ python-versions = ">=3.7" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "uri-template" version = "1.2.0" @@ -1581,7 +1569,7 @@ optional = false python-versions = ">=3.6" [package.extras] -dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] +dev = ["mypy", "flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", 
"flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "pep8-naming"] [[package]] name = "urllib3" @@ -1592,8 +1580,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1617,12 +1605,16 @@ python-versions = "*" [[package]] name = "webcolors" -version = "1.12" -description = "A library for working with color names and color values formats defined by HTML and CSS." +version = "1.13" +description = "A library for working with the color formats defined by HTML and CSS." category = "dev" optional = false python-versions = ">=3.7" +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + [[package]] name = "webencodings" version = "0.5.1" @@ -1645,7 +1637,7 @@ optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] -name = "Werkzeug" +name = "werkzeug" version = "2.1.2" description = "The comprehensive WSGI web application library." category = "main" @@ -1658,20 +1650,9 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog"] -[[package]] -name = "wheel" -version = "0.38.4" -description = "A built-package format for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest (>=3.0.0)"] - [[package]] name = "widgetsnbextension" -version = "4.0.5" +version = "4.0.7" description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false @@ -1686,985 +1667,141 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "sphinx-lint", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "jaraco.functools", "more-itertools", "big-o", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "2da6450ab510552fae213960c22acc6456126a4642acf9fd9bc77062959c14f7" +content-hash = "115514b1f1229bd8bf8ae3bbf89d647aea751f1d261ebe80e7beef93315170eb" [metadata.files] -anyio = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, -] -appnope = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = 
"sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] -argon2-cffi = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] -argon2-cffi-bindings = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] -arrow = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, -] -asttokens = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, -] -astunparse = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] -attrs = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, - {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, -] -bleach = [ - {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, - {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, -] -boto3 = [ - {file = "boto3-1.26.89-py3-none-any.whl", hash = "sha256:09929b24aaec4951e435d53d31f800e2ca52244af049dc11e5385ce062e106e9"}, - {file = "boto3-1.26.89.tar.gz", hash = "sha256:e819812f16fab46fadf9b2853a46aaa126e108e7f038502dde555ebbbfc80133"}, -] -botocore = [ - {file = "botocore-1.29.89-py3-none-any.whl", hash = "sha256:b757e59feca82ac62934f658918133116b4535cf66f1d72ff4935fa24e522527"}, - {file = "botocore-1.29.89.tar.gz", hash = "sha256:ac8da651f73a9d5759cf5d80beba68deda407e56aaaeb10d249fd557459f3b56"}, -] -cachetools = [ - {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, - {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, -] -certifi = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = 
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] -charset-normalizer = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -comm = [ - {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, - {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, -] -coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = 
"coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, -] -cryptography = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = 
"cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, -] -debugpy = [ - {file = "debugpy-1.6.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0ea1011e94416e90fb3598cc3ef5e08b0a4dd6ce6b9b33ccd436c1dffc8cd664"}, - {file = "debugpy-1.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dff595686178b0e75580c24d316aa45a8f4d56e2418063865c114eef651a982e"}, - {file = "debugpy-1.6.6-cp310-cp310-win32.whl", hash = "sha256:87755e173fcf2ec45f584bb9d61aa7686bb665d861b81faa366d59808bbd3494"}, - {file = "debugpy-1.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:72687b62a54d9d9e3fb85e7a37ea67f0e803aaa31be700e61d2f3742a5683917"}, - {file = "debugpy-1.6.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:78739f77c58048ec006e2b3eb2e0cd5a06d5f48c915e2fc7911a337354508110"}, - {file = "debugpy-1.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23c29e40e39ad7d869d408ded414f6d46d82f8a93b5857ac3ac1e915893139ca"}, - {file = "debugpy-1.6.6-cp37-cp37m-win32.whl", hash = "sha256:7aa7e103610e5867d19a7d069e02e72eb2b3045b124d051cfd1538f1d8832d1b"}, - {file = "debugpy-1.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:f6383c29e796203a0bba74a250615ad262c4279d398e89d895a69d3069498305"}, - {file = "debugpy-1.6.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:23363e6d2a04d726bbc1400bd4e9898d54419b36b2cdf7020e3e215e1dcd0f8e"}, - {file = "debugpy-1.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b5d1b13d7c7bf5d7cf700e33c0b8ddb7baf030fcf502f76fc061ddd9405d16c"}, - {file = "debugpy-1.6.6-cp38-cp38-win32.whl", hash = "sha256:70ab53918fd907a3ade01909b3ed783287ede362c80c75f41e79596d5ccacd32"}, - {file = "debugpy-1.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:c05349890804d846eca32ce0623ab66c06f8800db881af7a876dc073ac1c2225"}, - {file = "debugpy-1.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771739902b1ae22a120dbbb6bd91b2cae6696c0e318b5007c5348519a4211c6"}, - {file = "debugpy-1.6.6-cp39-cp39-win32.whl", hash = "sha256:549ae0cb2d34fc09d1675f9b01942499751d174381b6082279cf19cdb3c47cbe"}, - {file = "debugpy-1.6.6-cp39-cp39-win_amd64.whl", hash = 
"sha256:de4a045fbf388e120bb6ec66501458d3134f4729faed26ff95de52a754abddb1"}, - {file = "debugpy-1.6.6-py2.py3-none-any.whl", hash = "sha256:be596b44448aac14eb3614248c91586e2bc1728e020e82ef3197189aae556115"}, - {file = "debugpy-1.6.6.zip", hash = "sha256:b9c2130e1c632540fbf9c2c88341493797ddf58016e7cba02e311de9b0a96b67"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -defusedxml = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] -envs = [ - {file = "envs-1.4-py3-none-any.whl", hash = "sha256:4a1fcf85e4d4443e77c348ff7cdd3bfc4c0178b181d447057de342e4172e5ed1"}, - {file = "envs-1.4.tar.gz", hash = "sha256:9d8435c6985d1cdd68299e04c58e2bdb8ae6cf66b2596a8079e6f9a93f2a0398"}, -] -executing = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, -] -fastjsonschema = [ - {file = "fastjsonschema-2.16.3-py3-none-any.whl", hash = "sha256:04fbecc94300436f628517b05741b7ea009506ce8f946d40996567c669318490"}, - {file = "fastjsonschema-2.16.3.tar.gz", hash = "sha256:4a30d6315a68c253cfa8f963b9697246315aa3db89f98b97235e345dedfb0b8e"}, -] -faunadb = [ - {file = "faunadb-4.5.0-py2.py3-none-any.whl", hash = "sha256:5845911a3c16bc405145e16a247b1bcf67b4113822962cbfc40e1d1c6b5ac745"}, -] -formy = [ - {file = "formy-1.3.1-py3-none-any.whl", hash = "sha256:07c2a7ee351039694fe5b958ad4dfec34baeb0ffbddbf4af231609a75994e6f6"}, - {file = "formy-1.3.1.tar.gz", hash = "sha256:4ce7f79185c88f2fd896984a17e3d5cd23360db5408c7e726f64609371c0035d"}, -] -fqdn = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] -future = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, -] -graphql-py = [ - {file = "graphql-py-0.8.1.tar.gz", hash = "sha256:a03557d67817a0f23c5bef83ce8791c8fa3f5f9d165e1408abc6393def1d720c"}, -] -h11 = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] -h2 = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] -hpack = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] -httpcore = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = 
"sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, -] -httpx = [ - {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, - {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, -] -hyperframe = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -importlib-metadata = [ - {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, - {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, -] -importlib-resources = [ - {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, - {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, -] -ipykernel = [ - {file = "ipykernel-6.21.3-py3-none-any.whl", hash = "sha256:24ebd9715e317c185e37156ab3a87382410185230dde7aeffce389d6c7d4428a"}, - {file = "ipykernel-6.21.3.tar.gz", hash = "sha256:c8ff581905d70e7299bc1473a2f7c113bec1744fb3746d58e5b4b93bd8ee7001"}, -] -ipython = [ - {file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"}, - {file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"}, -] -ipython_genutils = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] -ipywidgets = [ - {file = "ipywidgets-8.0.4-py3-none-any.whl", hash = "sha256:ebb195e743b16c3947fe8827190fb87b4d00979c0fbf685afe4d2c4927059fa1"}, - {file = "ipywidgets-8.0.4.tar.gz", hash = "sha256:c0005a77a47d77889cafed892b58e33b4a2a96712154404c6548ec22272811ea"}, -] -iso8601 = [ - {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, - {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, -] -isoduration = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] -jedi = [ - {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, - {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, -] -Jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = 
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -jmespath = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] -jsonpointer = [ - {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, - {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, -] -jsonschema = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, -] -jupyter = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, -] -jupyter-client = [ - {file = "jupyter_client-8.0.3-py3-none-any.whl", hash = "sha256:be48ac6bd659cbbddb7a674cf06b3b8afbf53f228253cf58bde604c03bd487b0"}, - {file = "jupyter_client-8.0.3.tar.gz", hash = "sha256:ed65498bea6d876ef9d8da3e0db3dd33c5d129f5b2645f56ae03993782966bd0"}, -] -jupyter-console = [ - {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, - {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, -] -jupyter-core = [ - {file = "jupyter_core-5.2.0-py3-none-any.whl", hash = "sha256:4bdc2928c37f6917130c667d8b8708f20aee539d8283c6be72aabd2a4b4c83b0"}, - {file = "jupyter_core-5.2.0.tar.gz", hash = "sha256:1407cdb4c79ee467696c04b76633fc1884015fa109323365a6372c8e890cc83f"}, -] -jupyter-events = [ - {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, - {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, -] -jupyter-server = [ - {file = "jupyter_server-2.4.0-py3-none-any.whl", hash = "sha256:cc22792281bfb0131a728414f28ae74883b44ad6d009971aa975cae9bcc650de"}, - {file = "jupyter_server-2.4.0.tar.gz", hash = "sha256:f31f0ba2c3c44f07143bfa03fb07dd0253f857eb63f0c26f2fea955f04a49765"}, -] -jupyter-server-terminals = [ - {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, - {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, -] -jupyterlab-pygments = [ - {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, - {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, -] -jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.5-py3-none-any.whl", hash = "sha256:a04a42e50231b355b7087e16a818f541e53589f7647144ea0344c4bf16f300e5"}, - {file = "jupyterlab_widgets-3.0.5.tar.gz", hash = 
"sha256:eeaecdeaf6c03afc960ddae201ced88d5979b4ca9c3891bcb8f6631af705f5ef"}, -] -MarkupSafe = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, -] -matplotlib-inline = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] -mistune = [ - {file = "mistune-2.0.5-py2.py3-none-any.whl", hash = "sha256:bad7f5d431886fcbaf5f758118ecff70d31f75231b34024a1341120340a65ce8"}, - {file = "mistune-2.0.5.tar.gz", hash = "sha256:0246113cb2492db875c6be56974a7c893333bf26cd92891c85f63151cee09d34"}, -] -nbclassic = [ - {file = "nbclassic-0.5.3-py3-none-any.whl", hash = "sha256:e849277872d9ffd8fe4b39a8038d01ba82d6a1def9ce11b1b3c26c9546ed5131"}, - {file = "nbclassic-0.5.3.tar.gz", hash = "sha256:889772a7ba524eb781d2901f396540bcad41151e1f7e043f12ebc14a6540d342"}, -] -nbclient = [ - {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, - {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, -] -nbconvert = [ - {file = "nbconvert-7.2.9-py3-none-any.whl", hash = "sha256:495638c5e06005f4a5ce828d8a81d28e34f95c20f4384d5d7a22254b443836e7"}, - {file = "nbconvert-7.2.9.tar.gz", hash = "sha256:a42c3ac137c64f70cbe4d763111bf358641ea53b37a01a5c202ed86374af5234"}, -] -nbformat = [ - {file = "nbformat-5.7.3-py3-none-any.whl", hash = "sha256:22a98a6516ca216002b0a34591af5bcb8072ca6c63910baffc901cfa07fefbf0"}, - {file = "nbformat-5.7.3.tar.gz", hash = "sha256:4b021fca24d3a747bf4e626694033d792d594705829e5e35b14ee3369f9f6477"}, -] -nest-asyncio = [ - {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, - {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, -] -notebook = [ - {file = "notebook-6.5.3-py3-none-any.whl", hash = "sha256:50a334ad9d60b30cb759405168ef6fc3d60350ab5439fb1631544bb09dcb2cce"}, - {file = "notebook-6.5.3.tar.gz", hash = "sha256:b12bee3292211d85dd7e588a790ddce30cb3e8fbcfa1e803522a207f60819e05"}, -] -notebook-shim = [ - {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, - {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, -] -openapi-schema-validator = [ - {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, - {file = 
"openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, -] -openapi-spec-validator = [ - {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, - {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, -] -packaging = [ - {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, - {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, -] -pandocfilters = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, -] -parso = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] -pdoc = [ - {file = "pdoc-7.4.0-py3-none-any.whl", hash = "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] -pip = [ - {file = "pip-23.0.1-py3-none-any.whl", hash = "sha256:236bcb61156d76c4b8a05821b988c7b8c35bf0da28a4b614e8d6ab5212c25c6f"}, - {file = "pip-23.0.1.tar.gz", hash = "sha256:cd015ea1bfb0fcef59d8a286c1f8bebcb983f6317719d415dc5351efb7cd7024"}, -] -pkgutil_resolve_name = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] -platformdirs = [ - {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, - {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, -] -ply = [ - {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, - {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, -] -prometheus-client = [ - {file = "prometheus_client-0.16.0-py3-none-any.whl", hash = "sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab"}, - {file = "prometheus_client-0.16.0.tar.gz", hash = "sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, - {file = "prompt_toolkit-3.0.38.tar.gz", hash = 
"sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, -] -psutil = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -pure-eval = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -Pygments = [ - {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, - {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, -] -PyJWT = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, -] -pyrsistent = [ - {file = 
"pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = 
"pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -python-json-logger = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] -pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, -] -pywin32 = [ - {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, - {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, - {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, - {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, - {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, - {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, - {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, - {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, - {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, - {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, - {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, - {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, - {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, - {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, -] -pywinpty = [ - {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = 
"sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, - {file = "pywinpty-2.0.10-cp311-none-win_amd64.whl", hash = "sha256:7ffbd66310b83e42028fc9df7746118978d94fba8c1ebf15a7c1275fdd80b28a"}, - {file = "pywinpty-2.0.10-cp37-none-win_amd64.whl", hash = "sha256:38cb924f2778b5751ef91a75febd114776b3af0ae411bc667be45dd84fc881d3"}, - {file = "pywinpty-2.0.10-cp38-none-win_amd64.whl", hash = "sha256:902d79444b29ad1833b8d5c3c9aabdfd428f4f068504430df18074007c8c0de8"}, - {file = "pywinpty-2.0.10-cp39-none-win_amd64.whl", hash = "sha256:3c46aef80dd50979aff93de199e4a00a8ee033ba7a03cadf0a91fed45f0c39d7"}, - {file = "pywinpty-2.0.10.tar.gz", hash = "sha256:cdbb5694cf8c7242c2ecfaca35c545d31fa5d5814c3d67a4e628f803f680ebea"}, -] -PyYAML = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = 
"sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -pyzmq = [ - {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2d05d904f03ddf1e0d83d97341354dfe52244a619b5a1440a5f47a5b3451e84e"}, - {file = "pyzmq-25.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a154ef810d44f9d28868be04641f837374a64e7449df98d9208e76c260c7ef1"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487305c2a011fdcf3db1f24e8814bb76d23bc4d2f46e145bc80316a59a9aa07d"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e7b87638ee30ab13230e37ce5331b3e730b1e0dda30120b9eeec3540ed292c8"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75243e422e85a62f0ab7953dc315452a56b2c6a7e7d1a3c3109ac3cc57ed6b47"}, - {file = "pyzmq-25.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:31e523d067ce44a04e876bed3ff9ea1ff8d1b6636d16e5fcace9d22f8c564369"}, - {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8539216173135e9e89f6b1cc392e74e6b935b91e8c76106cf50e7a02ab02efe5"}, - {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:2754fa68da08a854f4816e05160137fa938a2347276471103d31e04bcee5365c"}, - {file = "pyzmq-25.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1bc30f0c18444d51e9b0d0dd39e3a4e7c53ee74190bebef238cd58de577ea9"}, - {file = "pyzmq-25.0.0-cp310-cp310-win32.whl", hash = "sha256:01d53958c787cfea34091fcb8ef36003dbb7913b8e9f8f62a0715234ebc98b70"}, - {file = "pyzmq-25.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:58fc3ad5e1cfd2e6d24741fbb1e216b388115d31b0ca6670f894187f280b6ba6"}, - {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4bba04ea779a3d7ef25a821bb63fd0939142c88e7813e5bd9c6265a20c523a2"}, - {file = "pyzmq-25.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af1fbfb7ad6ac0009ccee33c90a1d303431c7fb594335eb97760988727a37577"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85456f0d8f3268eecd63dede3b99d5bd8d3b306310c37d4c15141111d22baeaf"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0645b5a2d2a06fd8eb738018490c514907f7488bf9359c6ee9d92f62e844b76f"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f72ea279b2941a5203e935a4588b9ba8a48aeb9a926d9dfa1986278bd362cb8"}, - {file = "pyzmq-25.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:4e295f7928a31ae0f657e848c5045ba6d693fe8921205f408ca3804b1b236968"}, - {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ac97e7d647d5519bcef48dd8d3d331f72975afa5c4496c95f6e854686f45e2d9"}, - {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:656281d496aaf9ca4fd4cea84e6d893e3361057c4707bd38618f7e811759103c"}, - {file = "pyzmq-25.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f6116991568aac48b94d6d8aaed6157d407942ea385335a6ed313692777fb9d"}, - {file = "pyzmq-25.0.0-cp311-cp311-win32.whl", hash = "sha256:0282bba9aee6e0346aa27d6c69b5f7df72b5a964c91958fc9e0c62dcae5fdcdc"}, - {file = "pyzmq-25.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:526f884a27e8bba62fe1f4e07c62be2cfe492b6d432a8fdc4210397f8cf15331"}, - {file = "pyzmq-25.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ccb3e1a863222afdbda42b7ca8ac8569959593d7abd44f5a709177d6fa27d266"}, - {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4046d03100aca266e70d54a35694cb35d6654cfbef633e848b3c4a8d64b9d187"}, - {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3100dddcada66ec5940ed6391ebf9d003cc3ede3d320748b2737553019f58230"}, - {file = "pyzmq-25.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7877264aa851c19404b1bb9dbe6eed21ea0c13698be1eda3784aab3036d1c861"}, - {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5049e75cc99db65754a3da5f079230fb8889230cf09462ec972d884d1704a3ed"}, - {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:81f99fb1224d36eb91557afec8cdc2264e856f3464500b55749020ce4c848ef2"}, - {file = "pyzmq-25.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a1cd4a95f176cdc0ee0a82d49d5830f13ae6015d89decbf834c273bc33eeb3d3"}, - {file = "pyzmq-25.0.0-cp36-cp36m-win32.whl", hash = "sha256:926236ca003aec70574754f39703528947211a406f5c6c8b3e50eca04a9e87fc"}, - {file = "pyzmq-25.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:94f0a7289d0f5c80807c37ebb404205e7deb737e8763eb176f4770839ee2a287"}, - {file = "pyzmq-25.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:f3f96d452e9580cb961ece2e5a788e64abaecb1232a80e61deffb28e105ff84a"}, - {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:930e6ad4f2eaac31a3d0c2130619d25db754b267487ebc186c6ad18af2a74018"}, - {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1081d7030a1229c8ff90120346fb7599b54f552e98fcea5170544e7c6725aab"}, - {file = "pyzmq-25.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:531866c491aee5a1e967c286cfa470dffac1e2a203b1afda52d62b58782651e9"}, - {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fc7c1421c5b1c916acf3128bf3cc7ea7f5018b58c69a6866d70c14190e600ce9"}, - {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9a2d5e419bd39a1edb6cdd326d831f0120ddb9b1ff397e7d73541bf393294973"}, - {file = "pyzmq-25.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:183e18742be3621acf8908903f689ec520aee3f08449bfd29f583010ca33022b"}, - {file = "pyzmq-25.0.0-cp37-cp37m-win32.whl", hash = "sha256:02f5cb60a7da1edd5591a15efa654ffe2303297a41e1b40c3c8942f8f11fc17c"}, - {file = "pyzmq-25.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cac602e02341eaaf4edfd3e29bd3fdef672e61d4e6dfe5c1d065172aee00acee"}, - {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:e14df47c1265356715d3d66e90282a645ebc077b70b3806cf47efcb7d1d630cb"}, - {file = "pyzmq-25.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:293a7c2128690f496057f1f1eb6074f8746058d13588389981089ec45d8fdc77"}, - {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:731b208bc9412deeb553c9519dca47136b5a01ca66667cafd8733211941b17e4"}, - {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b055a1cddf8035966ad13aa51edae5dc8f1bba0b5d5e06f7a843d8b83dc9b66b"}, - {file = "pyzmq-25.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17e1cb97d573ea84d7cd97188b42ca6f611ab3ee600f6a75041294ede58e3d20"}, - {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:60ecbfe7669d3808ffa8a7dd1487d6eb8a4015b07235e3b723d4b2a2d4de7203"}, - {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c25c95416133942280faaf068d0fddfd642b927fb28aaf4ab201a738e597c1e"}, - {file = "pyzmq-25.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be05504af0619d1cffa500af1e0ede69fb683f301003851f5993b5247cc2c576"}, - {file = "pyzmq-25.0.0-cp38-cp38-win32.whl", hash = "sha256:6bf3842af37af43fa953e96074ebbb5315f6a297198f805d019d788a1021dbc8"}, - {file = "pyzmq-25.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b90bb8dfbbd138558f1f284fecfe328f7653616ff9a972433a00711d9475d1a9"}, - {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:62b9e80890c0d2408eb42d5d7e1fc62a5ce71be3288684788f74cf3e59ffd6e2"}, - {file = "pyzmq-25.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484c2c4ee02c1edc07039f42130bd16e804b1fe81c4f428e0042e03967f40c20"}, - {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9ca6db34b26c4d3e9b0728841ec9aa39484eee272caa97972ec8c8e231b20c7e"}, - {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:610d2d112acd4e5501fac31010064a6c6efd716ceb968e443cae0059eb7b86de"}, - {file = "pyzmq-25.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3594c0ff604e685d7e907860b61d0e10e46c74a9ffca168f6e9e50ea934ee440"}, - {file = 
"pyzmq-25.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c21a5f4e54a807df5afdef52b6d24ec1580153a6bcf0607f70a6e1d9fa74c5c3"}, - {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4725412e27612f0d7d7c2f794d89807ad0227c2fc01dd6146b39ada49c748ef9"}, - {file = "pyzmq-25.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d3d604fe0a67afd1aff906e54da557a5203368a99dcc50a70eef374f1d2abef"}, - {file = "pyzmq-25.0.0-cp39-cp39-win32.whl", hash = "sha256:3670e8c5644768f214a3b598fe46378a4a6f096d5fb82a67dfd3440028460565"}, - {file = "pyzmq-25.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e99629a976809fe102ef73e856cf4b2660acd82a412a51e80ba2215e523dfd0a"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:66509c48f7446b640eeae24b60c9c1461799a27b1b0754e438582e36b5af3315"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c464cc508177c09a5a6122b67f978f20e2954a21362bf095a0da4647e3e908"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:28bcb2e66224a7ac2843eb632e4109d6b161479e7a2baf24e37210461485b4f1"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e7ef9ac807db50b4eb6f534c5dcc22f998f5dae920cc28873d2c1d080a4fc9"}, - {file = "pyzmq-25.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5050f5c50b58a6e38ccaf9263a356f74ef1040f5ca4030225d1cb1a858c5b7b6"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a73af6504e0d2805e926abf136ebf536735a13c22f709be7113c2ec65b4bec3"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0e8d00228db627ddd1b418c7afd81820b38575f237128c9650365f2dd6ac3443"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5605621f2181f20b71f13f698944deb26a0a71af4aaf435b34dd90146092d530"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6136bfb0e5a9cf8c60c6ac763eb21f82940a77e6758ea53516c8c7074f4ff948"}, - {file = "pyzmq-25.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0a90b2480a26aef7c13cff18703ba8d68e181facb40f78873df79e6d42c1facc"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00c94fd4c9dd3c95aace0c629a7fa713627a5c80c1819326b642adf6c4b8e2a2"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20638121b0bdc80777ce0ec8c1f14f1ffec0697a1f88f0b564fa4a23078791c4"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f75b4b8574f3a8a0d6b4b52606fc75b82cb4391471be48ab0b8677c82f9ed4"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cbb885f347eba7ab7681c450dee5b14aed9f153eec224ec0c3f299273d9241f"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c48f257da280b3be6c94e05bd575eddb1373419dbb1a72c3ce64e88f29d1cd6d"}, - {file = "pyzmq-25.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:866eabf7c1315ef2e93e34230db7cbf672e0d7c626b37c11f7e870c8612c3dcc"}, - {file = "pyzmq-25.0.0.tar.gz", hash = "sha256:f330a1a2c7f89fd4b0aa4dcb7bf50243bf1c8da9a2f1efc31daf57a2046b31f2"}, -] -qtconsole = [ - {file = "qtconsole-5.4.1-py3-none-any.whl", hash = "sha256:bae8c7e10170cdcdcaf7e6d53ad7d6a7412249b9b8310a0eaa6b6f3b260f32db"}, - {file = "qtconsole-5.4.1.tar.gz", 
hash = "sha256:f67a03f40f722e13261791280f73068dbaf9dafcc335cbba644ccc8f892640e5"}, -] -QtPy = [ - {file = "QtPy-2.3.0-py3-none-any.whl", hash = "sha256:8d6d544fc20facd27360ea189592e6135c614785f0dec0b4f083289de6beb408"}, - {file = "QtPy-2.3.0.tar.gz", hash = "sha256:0603c9c83ccc035a4717a12908bf6bc6cb22509827ea2ec0e94c2da7c9ed57c5"}, -] -requests = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, -] -rfc3339-validator = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] -rfc3986 = [ - {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, - {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, -] -rfc3986-validator = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] -s3transfer = [ - {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, - {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, -] -sammy = [ - {file = "sammy-0.4.3-py2.py3-none-any.whl", hash = "sha256:f7fc12fae537ba22830a9de761686fac439e23c67339807f2f3d026d019bae9e"}, - {file = "sammy-0.4.3.tar.gz", hash = "sha256:dff98017d59c8860349cb07bd8af2eb843ccfc7a78d11ed82599b306ef4ce7ca"}, -] -Send2Trash = [ - {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, - {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, -] -setuptools = [ - {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, - {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sniffio = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] -soupsieve = [ - {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, - {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, -] -stack-data = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = 
"sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, -] -stripe = [ - {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, - {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, -] -swaggyp = [ - {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, - {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, -] -terminado = [ - {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, - {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, -] -tinycss2 = [ - {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, - {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, -] -tornado = [ - {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, - {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, - {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, - {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, - {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, -] -traitlets = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, -] -uri-template = [ - {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, - {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, -] -urllib3 = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = 
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, -] -valley = [ - {file = "valley-1.5.8-py3-none-any.whl", hash = "sha256:c30c0bdb30e5be561dd4332281fc53315c4c34f174d268d8cc7496a6f47ee314"}, - {file = "valley-1.5.8.tar.gz", hash = "sha256:88342fa4af854b8e9e426776995c9c2c690b432ea35c0c9529fa0abb62e553e3"}, -] -wcwidth = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, -] -webcolors = [ - {file = "webcolors-1.12-py3-none-any.whl", hash = "sha256:d98743d81d498a2d3eaf165196e65481f0d2ea85281463d856b1e51b09f62dce"}, - {file = "webcolors-1.12.tar.gz", hash = "sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9"}, -] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] -websocket-client = [ - {file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"}, - {file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"}, -] -Werkzeug = [ - {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, - {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, -] -wheel = [ - {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, - {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, -] -widgetsnbextension = [ - {file = "widgetsnbextension-4.0.5-py3-none-any.whl", hash = "sha256:eaaaf434fb9b08bd197b2a14ffe45ddb5ac3897593d43c69287091e5f3147bf7"}, - {file = "widgetsnbextension-4.0.5.tar.gz", hash = "sha256:003f716d930d385be3fd9de42dd9bf008e30053f73bddde235d14fbeaeff19af"}, -] -zipp = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, -] +anyio = [] +appnope = [] +argon2-cffi = [] +argon2-cffi-bindings = [] +arrow = [] +asttokens = [] +astunparse = [] +attrs = [] +backcall = [] +beautifulsoup4 = [] +bleach = [] +boto3 = [] +botocore = [] +cachetools = [] +certifi = [] +cffi = [] +charset-normalizer = [] +click = [] +colorama = [] +comm = [] +coverage = [] +cryptography = [] +debugpy = [] +decorator = [] +defusedxml = [] +envs = [] +executing = [] +fastjsonschema = [] +faunadb = [] +formy = [] +fqdn = [] +future = [] +graphql-py = [] +h11 = [] +h2 = [] +hpack = [] +httpcore = [] +httpx = [] +hyperframe = [] +idna = [] +importlib-metadata = [] +importlib-resources = [] +ipykernel = [] +ipython = [] +ipython-genutils = [] +ipywidgets = [] +iso8601 = [] +isoduration = [] +jedi = [] +jinja2 = [] +jmespath = [] +jsonpointer = [] +jsonschema = [] +jupyter = [] +jupyter-client = [] +jupyter-console = [] +jupyter-core = [] +jupyter-events = [] +jupyter-server = [] +jupyter-server-terminals = [] +jupyterlab-pygments = [] +jupyterlab-widgets = [] +markupsafe = [] +matplotlib-inline = [] 
+mistune = []
+nbclassic = []
+nbclient = []
+nbconvert = []
+nbformat = []
+nest-asyncio = []
+notebook = []
+notebook-shim = []
+openapi-schema-validator = []
+openapi-spec-validator = []
+packaging = []
+pandocfilters = []
+parso = []
+pdoc = []
+pexpect = []
+pickleshare = []
+pkgutil-resolve-name = []
+platformdirs = []
+ply = []
+prometheus-client = []
+prompt-toolkit = []
+psutil = []
+ptyprocess = []
+pure-eval = []
+pycparser = []
+pygments = []
+pyjwt = []
+pyrsistent = []
+python-dateutil = []
+python-json-logger = []
+pytz = []
+pywin32 = []
+pywinpty = []
+pyyaml = []
+pyzmq = []
+qtconsole = []
+qtpy = []
+requests = []
+rfc3339-validator = []
+rfc3986 = []
+rfc3986-validator = []
+s3transfer = []
+sammy = []
+send2trash = []
+six = []
+sniffio = []
+soupsieve = []
+stack-data = []
+stripe = []
+swaggyp = []
+terminado = []
+tinycss2 = []
+tornado = []
+traitlets = []
+typing-extensions = []
+uri-template = []
+urllib3 = []
+valley = []
+wcwidth = []
+webcolors = []
+webencodings = []
+websocket-client = []
+werkzeug = []
+widgetsnbextension = []
+zipp = []

From d03636949128b390170d2e8b3cfa4fed4e46fece Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Mon, 3 Apr 2023 09:44:32 +0800
Subject: [PATCH 210/214] reverted docker-compose file

---
 docker-compose.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-compose.yaml b/docker-compose.yaml
index ff462a4..daadf03 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -18,7 +18,7 @@ services:
       - ./:/code/
     env_file: .env
     working_dir: /code/
-    command: jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root
+    command: /root/.cache/pypoetry/virtualenvs/pfunk-MATOk_fk-py3.9/bin/jupyter notebook --port=8888 --ip=0.0.0.0 --allow-root

   fauna:
     restart: always

From 0c05a496e970c05bba314c4a122c8693dc27cf1c Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Mon, 3 Apr 2023 10:02:02 +0800
Subject: [PATCH 211/214] Updated tests to use local html file instead of tempfile

---
 pfunk/testcase.py                   |  2 +-
 pfunk/tests/test_email.py           | 22 +++++++++++-----------
 pfunk/tests/test_web_json_stripe.py | 15 +++++++--------
 3 files changed, 19 insertions(+), 20 deletions(-)

diff --git a/pfunk/testcase.py b/pfunk/testcase.py
index a07ef34..d438807 100644
--- a/pfunk/testcase.py
+++ b/pfunk/testcase.py
@@ -14,7 +14,7 @@ class PFunkTestCase(unittest.TestCase):

     def setUp(self) -> None:
         os.environ['PFUNK_TEST_MODE'] = 'True'
-        os.environ['TEMPLATE_ROOT_DIR'] = '/tmp'
+        os.environ['TEMPLATE_ROOT_DIR'] = '/'
         self.client = FaunaClient(secret='secret')
         self.db_name = str(uuid.uuid4())
         self.client.query(
diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py
index eb93ceb..b885441 100644
--- a/pfunk/tests/test_email.py
+++ b/pfunk/tests/test_email.py
@@ -15,6 +15,7 @@ class TestEmailBackend(APITestCase):
     collections = [User, Group, UserGroups]

     def setUp(self) -> None:
+        # NOTE: env var TEMPLATE_ROOT_DIR should be set to "/"
         super(TestEmailBackend, self).setUp()
         self.group = Group.create(name='Power Users', slug='power-users')
         self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted',
@@ -23,8 +24,8 @@ def setUp(self) -> None:
                                 _credentials='abc123', account_status='ACTIVE', groups=[self.group])
         self.backend = EmailBackend()
     def test_get_template(self):
-        with tempfile.NamedTemporaryFile(suffix='.html') as tmp:
-            template = self.backend.get_template(tmp.name.split("/")[-1])
+        template = self.backend.get_template(
+            '/code/pfunk/tests/templates/email/email_template.html')
         # test jinja render if no exceptions
         template.render(unittest_value="random value")
         self.assertTrue(True)  # if there are no exceptions, then it is a pass
@@ -59,15 +60,14 @@ def setUp(self) -> None:

     @mock.patch('boto3.client')
     def test_send_email(self, mocked):
-        with tempfile.NamedTemporaryFile(suffix='.html') as tmp:
-            res = self.SES.send_email(
-                subject="test",
-                to_emails=["testemail@email.com"],
-                html_template=tmp.name.split("/")[-1],
-                from_email="testFromEmail@email.com",
-                cc_emails=["testCCemail@email.com"],
-                bcc_emails=["testBCCemail@email.com"],
-            )
+        res = self.SES.send_email(
+            subject="test",
+            to_emails=["testemail@email.com"],
+            html_template='code/pfunk/tests/templates/email/email_template.html',
+            from_email="testFromEmail@email.com",
+            cc_emails=["testCCemail@email.com"],
+            bcc_emails=["testBCCemail@email.com"],
+        )

         # if there are no exceptions, then it's a passing test
         self.assertTrue(True)
diff --git a/pfunk/tests/test_web_json_stripe.py b/pfunk/tests/test_web_json_stripe.py
index 59b484b..1b7dcc5 100644
--- a/pfunk/tests/test_web_json_stripe.py
+++ b/pfunk/tests/test_web_json_stripe.py
@@ -231,14 +231,13 @@ def test_check_ip(self):

     @mock.patch('boto3.client')
     def test_send_html_email(self, mocked):
-        with tempfile.NamedTemporaryFile(suffix='.html') as tmp:
-            # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file
-            res = self.view.send_html_email(
-                subject='Test Subject',
-                from_email='unittesting@email.com',
-                to_email_list=['recipient@email.com'],
-                template_name=tmp.name.split("/")[-1]
-            )
+        # Requires to have `TEMPLATE_ROOT_DIR=/` in your .env file
+        res = self.view.send_html_email(
+            subject='Test Subject',
+            from_email='unittesting@email.com',
+            to_email_list=['recipient@email.com'],
+            template_name='/code/pfunk/tests/templates/email/email_template.html'
+        )
         self.assertTrue(True)  # if there are no exceptions, then it passed

     @mock.patch('stripe.Webhook')

From c9d4e1d700177b840c7091bddd6cff13bf151c58 Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Wed, 19 Apr 2023 14:50:35 +0800
Subject: [PATCH 212/214] fixed poetry files

---
 poetry.lock    | 4 +---
 pyproject.toml | 2 +-
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 317771a..72aad27 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1434,8 +1434,6 @@ python-versions = "*"
 nativelib = ["pyobjc-framework-cocoa", "pywin32"]
 objc = ["pyobjc-framework-cocoa"]
 win32 = ["pywin32"]
-objc = ["pyobjc-framework-cocoa"]
-nativelib = ["pywin32", "pyobjc-framework-cocoa"]

 [[package]]
 name = "six"
@@ -1804,4 +1802,4 @@ webencodings = []
 websocket-client = []
 werkzeug = []
 widgetsnbextension = []
-zipp = []
+zipp = []
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 4c5e00d..4594977 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -37,4 +37,4 @@ pdoc = "^7.2.0"
 requires = ["poetry>=0.12"]

 [tool.poetry.scripts]
-pfunk = 'pfunk.cli:pfunk'
+pfunk = 'pfunk.cli:pfunk'
\ No newline at end of file

From 49336ec0a9eb3fb5968c685b494a05345fee3c57 Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Tue, 25 Apr 2023 14:43:07 +0800
Subject: [PATCH 213/214] changed order of docker commands in gh actions

---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 2a2f56e..de67f70 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -39,7 +39,6 @@ jobs:
           echo PROJECT_NAME=${{ secrets.PROJECT_NAME }} >> .env
           echo KEY_MODULE=${{ secrets.KEY_MODULE }} >> .env
           cat .env
-      - run: docker-compose pull

       # In this step, this action saves a list of existing images,
       # the cache is created without them in the post run.
@@ -50,6 +49,7 @@ jobs:

      - run: docker volume create --name=pfunk-fauna-data
      - run: docker-compose build
+      - run: docker-compose pull

      # Runs a single command using the runners shell
      - name: Run Unit Tests

From a879c2491427c884835d5a6c831e56065038a73d Mon Sep 17 00:00:00 2001
From: Juliuz Christian Llanillo
Date: Tue, 25 Apr 2023 15:07:55 +0800
Subject: [PATCH 214/214] reverted gh actions. added dependency condition on compose

---
 .github/workflows/main.yml | 2 +-
 docker-compose.yaml        | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index de67f70..2a2f56e 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -39,6 +39,7 @@ jobs:
           echo PROJECT_NAME=${{ secrets.PROJECT_NAME }} >> .env
           echo KEY_MODULE=${{ secrets.KEY_MODULE }} >> .env
           cat .env
+      - run: docker-compose pull

       # In this step, this action saves a list of existing images,
       # the cache is created without them in the post run.
@@ -49,7 +50,6 @@ jobs:

      - run: docker volume create --name=pfunk-fauna-data
      - run: docker-compose build
-      - run: docker-compose pull

      # Runs a single command using the runners shell
      - name: Run Unit Tests
diff --git a/docker-compose.yaml b/docker-compose.yaml
index daadf03..66e29cb 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -10,7 +10,8 @@ services:
       - "8010"
       - "3434"
     depends_on:
-      - fauna
+      fauna:
+        condition: service_healthy
     ports:
       - 8010:8888
       - 3434:3434
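
A note on the docker-compose change in PATCH 214/214: the long form of `depends_on` with `condition: service_healthy` only takes effect if the awaited service defines a healthcheck, because Compose otherwise has no health status to wait on, and newer Compose releases typically refuse to start the dependent container at all. The fauna service definition is not shown in this series, so the block below is only a sketch; the probe command, the port, and the presence of curl in the FaunaDB dev image are assumptions, not taken from the repository.

    fauna:
      restart: always
      healthcheck:
        # Hypothetical probe; swap in whatever endpoint and tool the image actually provides.
        test: ["CMD", "curl", "-f", "http://localhost:8443/ping"]
        interval: 10s
        timeout: 5s
        retries: 10

With a healthcheck like this in place, `docker-compose up` holds the web container (and its Jupyter command) until Fauna reports healthy, rather than starting it as soon as the Fauna container exists.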