
Commit cf490d3

prepare 6.7.0 release (#103)
1 parent a745cac commit cf490d3

18 files changed: +1330, -174 lines

.circleci/config.yml

Lines changed: 6 additions & 0 deletions
@@ -40,28 +40,34 @@ jobs:
     docker:
       - image: circleci/python:2.7-jessie
       - image: redis
+      - image: amazon/dynamodb-local
   test-3.3:
     <<: *test-template
     docker:
       - image: circleci/python:3.3-jessie
       - image: redis
+      - image: amazon/dynamodb-local
   test-3.4:
     <<: *test-template
     docker:
       - image: circleci/python:3.4-jessie
       - image: redis
+      - image: amazon/dynamodb-local
   test-3.5:
     <<: *test-template
     docker:
       - image: circleci/python:3.5-jessie
       - image: redis
+      - image: amazon/dynamodb-local
   test-3.6:
     <<: *test-template
     docker:
       - image: circleci/python:3.6-jessie
       - image: redis
+      - image: amazon/dynamodb-local
   test-3.7:
     <<: *test-template
     docker:
       - image: circleci/python:3.7-stretch
       - image: redis
+      - image: amazon/dynamodb-local
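
The added amazon/dynamodb-local container gives every test job a local DynamoDB endpoint. A hedged sketch of how a test could point the new store at that container: the endpoint URL, port 8000, and dummy credentials are assumptions, but `dynamodb_opts` is forwarded verbatim to `boto3.client('dynamodb', ...)` by the store code added in this commit, so any standard boto3 client option can be passed this way.

```python
# Hedged sketch: pointing the DynamoDB store at a local dynamodb-local
# container for tests. Endpoint/port and dummy credentials are assumptions;
# dynamodb_opts is passed straight through to boto3.client().
from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore

core = _DynamoDBFeatureStoreCore(
    'test-table',   # table is assumed to already exist with 'namespace'/'key' keys
    '',             # no key prefix
    {
        'endpoint_url': 'http://localhost:8000',   # dynamodb-local default port (assumed)
        'region_name': 'us-east-1',
        'aws_access_key_id': 'dummy',
        'aws_secret_access_key': 'dummy'
    })
print(core.initialized_internal())  # False until init_internal() has been called
```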

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -44,6 +44,7 @@ nosetests.xml
 coverage.xml
 *,cover
 .hypothesis/
+.pytest_cache

 # Translations
 *.mo

README.md

Lines changed: 10 additions & 4 deletions
@@ -52,7 +52,6 @@ Or it can be set from within python:
 os.environ["https_proxy"] = "https://web-proxy.domain.com:8080"
 ```
 
-
 If your proxy requires authentication then you can prefix the URN with your login information:
 ```
 export HTTPS_PROXY=http://user:[email protected]:8080
@@ -75,12 +74,19 @@ Your first feature flag
         # the code to run if the feature is off
 
 Supported Python versions
-----------
+-------------------------
+
 The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3.5, and 3.6. Python 2.6 is no longer supported.
 
+Database integrations
+---------------------
+
+Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information.
+
 Using flag data from a file
 ---------------------------
-For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) for more details.
+
+For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details.
 
 Learn more
 -----------
@@ -100,7 +106,7 @@ Contributing
 See [CONTRIBUTING](CONTRIBUTING.md) for more information.
 
 About LaunchDarkly
------------
+------------------
 
 * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can:
     * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases.
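
The new "Database integrations" paragraph above describes the public configuration path. A minimal sketch of that flow, assuming `DynamoDB.new_feature_store` takes the table name as its first argument (the exact signature is not shown in this diff):

```python
# Minimal sketch of the configuration flow described in the README section above.
# The new_feature_store signature is an assumption; only the table name is passed here.
import ldclient
from ldclient.config import Config
from ldclient.integrations import DynamoDB

store = DynamoDB.new_feature_store('my-flags-table')
ldclient.set_config(Config(sdk_key='YOUR_SDK_KEY', feature_store=store))
client = ldclient.get()
```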

dynamodb-requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+boto3>=1.9.71

ldclient/client.py

Lines changed: 43 additions & 5 deletions
@@ -10,8 +10,10 @@
 from ldclient.config import Config as Config
 from ldclient.event_processor import NullEventProcessor
 from ldclient.feature_requester import FeatureRequesterImpl
+from ldclient.feature_store import _FeatureStoreDataSetSorter
 from ldclient.flag import EvaluationDetail, evaluate, error_reason
 from ldclient.flags_state import FeatureFlagsState
+from ldclient.interfaces import FeatureStore
 from ldclient.polling import PollingUpdateProcessor
 from ldclient.streaming import StreamingUpdateProcessor
 from ldclient.util import check_uwsgi, log
@@ -27,6 +29,35 @@
 from threading import Lock
 
 
+class _FeatureStoreClientWrapper(FeatureStore):
+    """Provides additional behavior that the client requires before or after feature store operations.
+    Currently this just means sorting the data set for init(). In the future we may also use this
+    to provide an update listener capability.
+    """
+
+    def __init__(self, store):
+        self.store = store
+
+    def init(self, all_data):
+        return self.store.init(_FeatureStoreDataSetSorter.sort_all_collections(all_data))
+
+    def get(self, kind, key, callback):
+        return self.store.get(kind, key, callback)
+
+    def all(self, kind, callback):
+        return self.store.all(kind, callback)
+
+    def delete(self, kind, key, version):
+        return self.store.delete(kind, key, version)
+
+    def upsert(self, kind, item):
+        return self.store.upsert(kind, item)
+
+    @property
+    def initialized(self):
+        return self.store.initialized
+
+
 class LDClient(object):
     def __init__(self, sdk_key=None, config=None, start_wait=5):
         """Constructs a new LDClient instance.
@@ -55,7 +86,7 @@ def __init__(self, sdk_key=None, config=None, start_wait=5):
         self._event_processor = None
         self._lock = Lock()
 
-        self._store = self._config.feature_store
+        self._store = _FeatureStoreClientWrapper(self._config.feature_store)
         """ :type: FeatureStore """
 
         if self._config.offline or not self._config.send_events:
@@ -243,7 +274,14 @@ def send_event(value, variation=None, flag=None, reason=None):
         if user is not None and user.get('key', "") == "":
             log.warn("User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly.")
 
-        flag = self._store.get(FEATURES, key, lambda x: x)
+        try:
+            flag = self._store.get(FEATURES, key, lambda x: x)
+        except Exception as e:
+            log.error("Unexpected error while retrieving feature flag \"%s\": %s" % (key, repr(e)))
+            log.debug(traceback.format_exc())
+            reason = error_reason('EXCEPTION')
+            send_event(default, None, None, reason)
+            return EvaluationDetail(default, None, reason)
         if not flag:
             reason = error_reason('FLAG_NOT_FOUND')
             send_event(default, None, None, reason)
@@ -264,7 +302,7 @@ def send_event(value, variation=None, flag=None, reason=None):
             send_event(detail.value, detail.variation_index, flag, detail.reason)
             return detail
         except Exception as e:
-            log.error("Unexpected error while evaluating feature flag \"%s\": %s" % (key, e))
+            log.error("Unexpected error while evaluating feature flag \"%s\": %s" % (key, repr(e)))
             log.debug(traceback.format_exc())
             reason = error_reason('EXCEPTION')
             send_event(default, None, flag, reason)
@@ -328,7 +366,7 @@ def all_flags_state(self, user, **kwargs):
             if flags_map is None:
                 raise ValueError("feature store error")
         except Exception as e:
-            log.error("Unable to read flags for all_flag_state: %s" % e)
+            log.error("Unable to read flags for all_flag_state: %s" % repr(e))
             return FeatureFlagsState(False)
 
         for key, flag in flags_map.items():
@@ -339,7 +377,7 @@ def all_flags_state(self, user, **kwargs):
                 state.add_flag(flag, detail.value, detail.variation_index,
                                detail.reason if with_reasons else None, details_only_if_tracked)
             except Exception as e:
-                log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, e))
+                log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, repr(e)))
                 log.debug(traceback.format_exc())
                 reason = {'kind': 'ERROR', 'errorKind': 'EXCEPTION'}
                 state.add_flag(flag, None, None, reason if with_reasons else None, details_only_if_tracked)
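
With the new try/except around the store read, a failure in the persistent feature store now surfaces as a normal default-value result with an EXCEPTION error reason rather than an unhandled exception. A hedged sketch of what a caller would observe through `variation_detail` (assuming that method is available in this release; the flag key and user dict are placeholders):

```python
# Hedged sketch: when the feature store raises, evaluation falls back to the
# default value and reports an EXCEPTION error reason instead of propagating.
import ldclient

client = ldclient.get()  # assumes ldclient.set_config(...) was called earlier
detail = client.variation_detail('my-flag-key', {'key': 'example-user'}, False)
if detail.reason.get('kind') == 'ERROR':
    # e.g. {'kind': 'ERROR', 'errorKind': 'EXCEPTION'}, matching error_reason('EXCEPTION')
    print('fell back to default:', detail.value)
```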

ldclient/dynamodb_feature_store.py

Lines changed: 191 additions & 0 deletions
@@ -0,0 +1,191 @@
+import json
+
+have_dynamodb = False
+try:
+    import boto3
+    have_dynamodb = True
+except ImportError:
+    pass
+
+from ldclient import log
+from ldclient.feature_store import CacheConfig
+from ldclient.feature_store_helpers import CachingStoreWrapper
+from ldclient.interfaces import FeatureStore, FeatureStoreCore
+
+#
+# Internal implementation of the DynamoDB feature store.
+#
+# Implementation notes:
+#
+# * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish
+# to store, are all put in the same table. The only two required attributes are "key" (which
+# is present in all storeable entities) and "namespace" (a parameter from the client that is
+# used to disambiguate between flags and segments).
+#
+# * Because of DynamoDB's restrictions on attribute values (e.g. empty strings are not
+# allowed), the standard DynamoDB marshaling mechanism with one attribute per object property
+# is not used. Instead, the entire object is serialized to JSON and stored in a single
+# attribute, "item". The "version" property is also stored as a separate attribute since it
+# is used for updates.
+#
+# * Since DynamoDB doesn't have transactions, the init() method - which replaces the entire data
+# store - is not atomic, so there can be a race condition if another process is adding new data
+# via upsert(). To minimize this, we don't delete all the data at the start; instead, we update
+# the items we've received, and then delete all other items. That could potentially result in
+# deleting new data from another process, but that would be the case anyway if the init()
+# happened to execute later than the upsert(); we are relying on the fact that normally the
+# process that did the init() will also receive the new data shortly and do its own upsert().
+#
+# * DynamoDB has a maximum item size of 400KB. Since each feature flag or user segment is
+# stored as a single item, this mechanism will not work for extremely large flags or segments.
+#
+
+class _DynamoDBFeatureStoreCore(FeatureStoreCore):
+    PARTITION_KEY = 'namespace'
+    SORT_KEY = 'key'
+    VERSION_ATTRIBUTE = 'version'
+    ITEM_JSON_ATTRIBUTE = 'item'
+
+    def __init__(self, table_name, prefix, dynamodb_opts):
+        if not have_dynamodb:
+            raise NotImplementedError("Cannot use DynamoDB feature store because AWS SDK (boto3 package) is not installed")
+        self._table_name = table_name
+        self._prefix = None if prefix == "" else prefix
+        self._client = boto3.client('dynamodb', **dynamodb_opts)
+
+    def init_internal(self, all_data):
+        # Start by reading the existing keys; we will later delete any of these that weren't in all_data.
+        unused_old_keys = self._read_existing_keys(all_data.keys())
+        requests = []
+        num_items = 0
+        inited_key = self._inited_key()
+
+        # Insert or update every provided item
+        for kind, items in all_data.items():
+            for key, item in items.items():
+                encoded_item = self._marshal_item(kind, item)
+                requests.append({ 'PutRequest': { 'Item': encoded_item } })
+                combined_key = (self._namespace_for_kind(kind), key)
+                unused_old_keys.discard(combined_key)
+                num_items = num_items + 1
+
+        # Now delete any previously existing items whose keys were not in the current data
+        for combined_key in unused_old_keys:
+            if combined_key[0] != inited_key:
+                requests.append({ 'DeleteRequest': { 'Key': self._make_keys(combined_key[0], combined_key[1]) } })
+
+        # Now set the special key that we check in initialized_internal()
+        requests.append({ 'PutRequest': { 'Item': self._make_keys(inited_key, inited_key) } })
+
+        _DynamoDBHelpers.batch_write_requests(self._client, self._table_name, requests)
+        log.info('Initialized table %s with %d items', self._table_name, num_items)
+
+    def get_internal(self, kind, key):
+        resp = self._get_item_by_keys(self._namespace_for_kind(kind), key)
+        return self._unmarshal_item(resp.get('Item'))
+
+    def get_all_internal(self, kind):
+        items_out = {}
+        paginator = self._client.get_paginator('query')
+        for resp in paginator.paginate(**self._make_query_for_kind(kind)):
+            for item in resp['Items']:
+                item_out = self._unmarshal_item(item)
+                items_out[item_out['key']] = item_out
+        return items_out
+
+    def upsert_internal(self, kind, item):
+        encoded_item = self._marshal_item(kind, item)
+        try:
+            req = {
+                'TableName': self._table_name,
+                'Item': encoded_item,
+                'ConditionExpression': 'attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version',
+                'ExpressionAttributeNames': {
+                    '#namespace': self.PARTITION_KEY,
+                    '#key': self.SORT_KEY,
+                    '#version': self.VERSION_ATTRIBUTE
+                },
+                'ExpressionAttributeValues': {
+                    ':version': { 'N': str(item['version']) }
+                }
+            }
+            self._client.put_item(**req)
+        except self._client.exceptions.ConditionalCheckFailedException:
+            # The item was not updated because there's a newer item in the database. We must now
+            # read the item that's in the database and return it, so CachingStoreWrapper can cache it.
+            return self.get_internal(kind, item['key'])
+        return item
+
+    def initialized_internal(self):
+        resp = self._get_item_by_keys(self._inited_key(), self._inited_key())
+        return resp.get('Item') is not None and len(resp['Item']) > 0
+
+    def _prefixed_namespace(self, base):
+        return base if self._prefix is None else (self._prefix + ':' + base)
+
+    def _namespace_for_kind(self, kind):
+        return self._prefixed_namespace(kind.namespace)
+
+    def _inited_key(self):
+        return self._prefixed_namespace('$inited')
+
+    def _make_keys(self, namespace, key):
+        return {
+            self.PARTITION_KEY: { 'S': namespace },
+            self.SORT_KEY: { 'S': key }
+        }
+
+    def _make_query_for_kind(self, kind):
+        return {
+            'TableName': self._table_name,
+            'ConsistentRead': True,
+            'KeyConditions': {
+                self.PARTITION_KEY: {
+                    'AttributeValueList': [
+                        { 'S': self._namespace_for_kind(kind) }
+                    ],
+                    'ComparisonOperator': 'EQ'
+                }
+            }
+        }
+
+    def _get_item_by_keys(self, namespace, key):
+        return self._client.get_item(TableName=self._table_name, Key=self._make_keys(namespace, key))
+
+    def _read_existing_keys(self, kinds):
+        keys = set()
+        for kind in kinds:
+            req = self._make_query_for_kind(kind)
+            req['ProjectionExpression'] = '#namespace, #key'
+            req['ExpressionAttributeNames'] = {
+                '#namespace': self.PARTITION_KEY,
+                '#key': self.SORT_KEY
+            }
+            paginator = self._client.get_paginator('query')
+            for resp in paginator.paginate(**req):
+                for item in resp['Items']:
+                    namespace = item[self.PARTITION_KEY]['S']
+                    key = item[self.SORT_KEY]['S']
+                    keys.add((namespace, key))
+        return keys
+
+    def _marshal_item(self, kind, item):
+        json_str = json.dumps(item)
+        ret = self._make_keys(self._namespace_for_kind(kind), item['key'])
+        ret[self.VERSION_ATTRIBUTE] = { 'N': str(item['version']) }
+        ret[self.ITEM_JSON_ATTRIBUTE] = { 'S': json_str }
+        return ret
+
+    def _unmarshal_item(self, item):
+        if item is None:
+            return None
+        json_attr = item.get(self.ITEM_JSON_ATTRIBUTE)
+        return None if json_attr is None else json.loads(json_attr['S'])
+
+
+class _DynamoDBHelpers(object):
+    @staticmethod
+    def batch_write_requests(client, table_name, requests):
+        batch_size = 25
+        for batch in (requests[i:i+batch_size] for i in range(0, len(requests), batch_size)):
+            client.batch_write_item(RequestItems={ table_name: batch })
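
The core above implements only the raw DynamoDB operations; judging by its imports, it is meant to sit behind `CachingStoreWrapper`, which adds local caching and exposes the standard `FeatureStore` interface to the client. A hedged sketch of that wiring follows; the `CacheConfig.default()` helper and the `CachingStoreWrapper(core, cache_config)` constructor are assumptions based on the names imported here, since neither is shown in this diff.

```python
# Hedged sketch of how the internal core is expected to be wrapped; the public
# entry point in ldclient.integrations presumably does the equivalent of this.
from ldclient.feature_store import CacheConfig
from ldclient.feature_store_helpers import CachingStoreWrapper
from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore

core = _DynamoDBFeatureStoreCore('my-flags-table', '', {})
store = CachingStoreWrapper(core, CacheConfig.default())
# 'store' can then be supplied as Config(feature_store=store)
```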
