diff --git a/README.md b/README.md
index 4b13458..3ca77b5 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,11 @@
 ![alt text](https://s3.amazonaws.com/capless/images/kev-small.png "KEV - Keys, Extra Stuff, and Values")
 
-
 # kev
 K.E.V. (Keys, Extra Stuff, and Values) is a Python ORM for key-value stores and document databases based on [**Valley**](https://www.github.com/capless/valley). Currently supported backends are Redis, S3, DynamoDB and a S3/Redis hybrid backend.
 
+## PROJECT UPDATE
+The DynamoDB and Cloudant backends were moved to [DocB](https://github.com/capless/docb). The Redis, S3, and S3/Redis backends will continue to be supported here in Kev. We felt the need to split up the key-value stores and document databases.
+
 [![Build Status](https://travis-ci.org/capless/kev.svg?branch=master)](https://travis-ci.org/capless/kev)
 
 ## Python Versions
@@ -49,12 +51,6 @@ kev_handler = KevHandler({
             'host': 'your-redis-host.com',
             'port': 6379,
         }
-    },
-    'dynamodb': {
-        'backend': 'kev.backends.dynamodb.db.DynamoDB',
-        'connection': {
-            'table': 'your-dynamodb-table',
-        }
     }
 })
 ```
@@ -173,93 +169,6 @@ Prefix filters currently only work with the S3 backend. Use wildcard filters wit
 >>>TestDocument.objects().filter({'state':'N'})
 []
 ```
-### DynamoDB setup
-#### Create a table
-* **Table name** should be between 3 and 255 characters long. (A-Z,a-z,0-9,_,-,.)
-* **Primary key** (partition key) should be equal to `_id`
-
-#### Filter Documents
-If you want to make `filter()` queries, you should create an index for every attribute that you want to filter by.
-* **Primary key** should be equal to attribute name.
-* **Index name** should be equal to attribute name postfixed by *"-index"*. (It will be filled by AWS automatically.)
-For example, for attribute *"city"*: *Primary key* = *"city"* and index name = *"city-index"*.
-* **Index name** can be directly specified by `index_name` argument:
-```python
-    name = CharProperty(required=True,unique=True,min_length=5,max_length=20,index_name='name_index')
-```
-- **IMPORTANT: In other words, if your indexed attribute is named city, then your index name should be city-index,
-if you didn't specify `index_name` argument.**
-* **Projected attributes**: *All*.
-
-### Use DynamoDB locally
-#### Run DynamoDB
-* with persistent storage `docker run -d -p 8000:8000 -v /tmp/data:/data/ dwmkerr/dynamodb -dbPath /data/`
-
-#### Configuration
-**Example:** loading.py
-```python
-from kev.loading import KevHandler
-
-
-kev_handler = KevHandler({
-    'dynamodb': {
-        'backend': 'kev.backends.dynamodb.db.DynamoDB',
-        'connection': {
-            'table': 'your-dynamodb-table',
-            'endpoint_url': 'http://127.0.0.1:8000'
-        }
-    }
-})
-```
-
-#### Testing
-##### Run DynamoDB
-* in memory (best performance) `docker run -d -p 8000:8000 dwmkerr/dynamodb -inMemory`
-
-##### Create a table for testing.
-
-```python
-import boto3
-
-
-table_wcu = 2000
-table_rcu = 2000
-index_wcu = 3000
-index_rcu = 2000
-table_name = 'localtable'
-
-dynamodb = boto3.resource('dynamodb', endpoint_url="http://127.0.0.1:8000")
-dynamodb.create_table(TableName=table_name, KeySchema=[{'AttributeName': '_id', 'KeyType': 'HASH'}],
-                      ProvisionedThroughput={'ReadCapacityUnits': table_rcu,
-                                             'WriteCapacityUnits': table_wcu},
-                      AttributeDefinitions=[{'AttributeName': '_id', 'AttributeType': 'S'},
-                                            {u'AttributeName': u'city', u'AttributeType': u'S'},
-                                            {u'AttributeName': u'email', u'AttributeType': u'S'},
-                                            {u'AttributeName': u'name', u'AttributeType': u'S'},
-                                            {u'AttributeName': u'slug', u'AttributeType': u'S'}],
-                      GlobalSecondaryIndexes=[
-                          {'IndexName': 'city-index', 'Projection': {'ProjectionType': 'ALL'},
-                           'ProvisionedThroughput': {'WriteCapacityUnits': index_wcu,
-                                                     'ReadCapacityUnits': index_rcu},
-                           'KeySchema': [{'KeyType': 'HASH', 'AttributeName': 'city'}]},
-                          {'IndexName': 'name-index', 'Projection': {'ProjectionType': 'ALL'},
-                           'ProvisionedThroughput': {'WriteCapacityUnits': index_wcu,
-                                                     'ReadCapacityUnits': index_rcu},
-                           'KeySchema': [{'KeyType': 'HASH', 'AttributeName': 'name'}]},
-                          {'IndexName': 'slug-index', 'Projection': {'ProjectionType': 'ALL'},
-                           'ProvisionedThroughput': {'WriteCapacityUnits': index_wcu,
-                                                     'ReadCapacityUnits': index_rcu},
-                           'KeySchema': [{'KeyType': 'HASH', 'AttributeName': 'slug'}]},
-                          {'IndexName': 'email-index', 'Projection': {'ProjectionType': 'ALL'},
-                           'ProvisionedThroughput': {'WriteCapacityUnits': index_wcu,
-                                                     'ReadCapacityUnits': index_rcu},
-                           'KeySchema': [{'KeyType': 'HASH', 'AttributeName': 'email'}]}])
-```
-##### Setup environment variables.
-```bash
-export DYNAMO_TABLE_TEST='localtable'
-export DYNAMO_ENDPOINT_URL_TEST='http://127.0.0.1:8000'
-```
 
 ### Backup and Restore
 
diff --git a/kev/backends/s3/db.py b/kev/backends/s3/db.py
index 586826f..fb62df9 100644
--- a/kev/backends/s3/db.py
+++ b/kev/backends/s3/db.py
@@ -15,16 +15,26 @@ class S3DB(DocDB):
                     '[-\W\w\s]+)/(?P[-\w]+):id:' \
                     '(?P[-\w]+):(?P[-\w]+)$'
     session_kwargs = ['aws_secret_access_key', 'aws_access_key_id',
-                      'endpoint_url']
+                      'region_name', 'endpoint_url',
+                      'signature_version']
 
     def __init__(self,**kwargs):
         #
-        session_kwargs = {k: v for k, v in kwargs.items() if k in
-                          self.session_kwargs}
-        if len(session_kwargs.keys()) > 0:
-            boto3.Session(**session_kwargs)
-
-        self._db = boto3.resource('s3')
+        session_kwargs = {
+            k: v for k, v in kwargs.items() if k in self.session_kwargs
+        }
+        if 'signature_version' in session_kwargs.keys():
+            signature_version = session_kwargs['signature_version']
+            del(session_kwargs['signature_version'])
+            self._db = boto3.resource(
+                's3',
+                config=boto3.session.Config(
+                    signature_version=signature_version
+                ),
+                **session_kwargs
+            )
+        else:
+            self._db = boto3.resource('s3', **session_kwargs)
         self.bucket = kwargs['bucket']
         self._indexer = self._db.Bucket(self.bucket)
diff --git a/setup.py b/setup.py
index 90ff614..57b8987 100644
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,9 @@
 from setuptools import setup, find_packages
-from pip.req import parse_requirements
+
+try:  # Fix for pip >= 10
+    from pip._internal.req import parse_requirements
+except ImportError:  # for pip <= 9.0.3
+    from pip.req import parse_requirements
 
 install_reqs = parse_requirements('requirements.txt', session=False)
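The `kev/backends/s3/db.py` change above adds `region_name` and `signature_version` to the accepted `session_kwargs` and forwards them to `boto3.resource('s3', ...)`. Below is a minimal sketch of how those new connection settings could be declared through `KevHandler`, following the loading.py pattern used elsewhere in the README. The bucket name, region, and signature version are placeholder values, and the sketch assumes the `connection` keys are passed to the backend as keyword arguments, as in the Redis example; the `kev.backends.s3.db.S3DB` dotted path is inferred from the file layout and the DynamoDB example.

```python
# loading.py (sketch) -- bucket, region, and signature version are placeholders
from kev.loading import KevHandler

kev_handler = KevHandler({
    's3': {
        'backend': 'kev.backends.s3.db.S3DB',
        'connection': {
            'bucket': 'your-s3-bucket',
            # New keys accepted by S3DB after this change:
            'region_name': 'us-east-1',
            'signature_version': 's3v4',
        }
    }
})
```

When `signature_version` is present, the new `S3DB.__init__` removes it from the session kwargs and builds the resource with `config=boto3.session.Config(signature_version=...)`; otherwise the remaining kwargs go straight to `boto3.resource('s3', **session_kwargs)`.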