Skip to content

Commit

Permalink
Fix threading issues and update logo image to SVG
Browse files Browse the repository at this point in the history
  • Loading branch information
AA-Turner committed Jun 9, 2020
1 parent a4732d4 commit 3563c57
Show file tree
Hide file tree
Showing 7 changed files with 27 additions and 17 deletions.
3 changes: 2 additions & 1 deletion app.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,8 @@ def serve_report(path):
server.logger.info("#############################################################")
server.logger.info("Running server on gunicorn")
server.logger.info(f"Debugger PIN: {debug_pin}")

else:
server.logger.setLevel(logging.DEBUG)

redis_cache = cache.CacheInterface(app)
redis_cache.load_from_disk()
Expand Down
2 changes: 2 additions & 0 deletions assets/compliance.css
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@
.logo-image{
width: calc(var(--vmin-em) * 9.375);
height: auto;
/* The logo is referenced via <img src="...svg"> rather than inlined SVG, so
   `fill: #7413DC` cannot recolour it; this filter approximates the brand
   purple instead. Filter values generated via codepen.io/pen/Pjoqqp */
filter: invert(12%) sepia(100%) saturate(5969%) hue-rotate(272deg) brightness(87%) contrast(101%);
}

.logo-text{
Expand Down
Binary file removed assets/scout-logo-purple-stack.png
Binary file not shown.
1 change: 1 addition & 0 deletions assets/scout-logo-stack.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
19 changes: 12 additions & 7 deletions src/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def __init__(self, app: dash.Dash):
self.cache: flask_caching.Cache = flask_caching.Cache(app.server, config=dict(
CACHE_TYPE="redis",
CACHE_REDIS_HOST=config.redis_host,
CACHE_REDIS_PASSWORD=config.redis_key,
CACHE_REDIS_PASSWORD="OFwueYfOjelJRYaFCbPZ0L4E6rlPzvLsGmyjLuIvVxU=",
CACHE_REDIS_PORT=6380,
CACHE_REDIS_DB=0,
CACHE_OPTIONS={"ssl": True}
Expand All @@ -27,7 +27,7 @@ def __init__(self, app: dash.Dash):
self.environment = "prod" if config.is_production else "dev"
self.app = app

def _serialize_path(self, *path):
def _serialize_path(self, *path) -> str:
return "/".join([self.environment, *path])

@staticmethod
Expand All @@ -38,15 +38,19 @@ def _set_by_path(root: dict, items, value):
root = root[key]
root[items[-1]] = value

def get_keys_from_partial(self, *path):
@staticmethod
def _keys_to_strings(keys: list, key_prefix: str = "") -> list:
return [key.decode("UTF8")[len(key_prefix):] for key in keys] # TODO key.removeprefix(root) when py3.9 is released (Oct 20)

def get_keys_from_partial(self, *path) -> list:
    """Return every raw redis key whose name starts with the serialised *path*.

    The lookup goes through the redis client directly (``self.r``), so the
    returned keys are raw bytes including the flask-caching key prefix.
    """
    pattern = self.key_prefix + self._serialize_path(*path) + "*"
    return self.r.keys(pattern)

def get_dict_from_partial(self, *path):
keys = self.get_keys_from_partial(*path)
root = self._serialize_path(*path) + "/"
keys = [key.decode("UTF8").lstrip(self.key_prefix) for key in keys]
results = {key[len(root):]: self.cache.get(key) for key in keys} # TODO key.removeprefix(root) when py3.9 is released (Oct 20)
stringified_keys = self._keys_to_strings(keys, self.key_prefix)
results = {key[len(root):]: self.cache.get(key) for key in stringified_keys} # TODO key.removeprefix(root) when py3.9 is released (Oct 20)
tree = self._build_tree(results)
return tree

Expand Down Expand Up @@ -83,13 +87,14 @@ def save_to_disk(self):
# Ensure state is not changed whilst getting data
with self.r.lock("saving", timeout=5, blocking_timeout=2.5):
keys = self.r.keys()
vals = self.cache.get_many(*[key.decode("UTF8") for key in keys])
vals = self.cache.get_many(*self._keys_to_strings(keys, self.key_prefix))
timestamps = []
for key in keys:
timestamps.append(self.get_key_timestamp(key).isoformat())

stringified_keys = self._keys_to_strings(keys, self.key_prefix)
vals_with_timestamps = [*zip(vals, timestamps)]
pairs = dict(zip(keys, vals_with_timestamps))
pairs = dict(zip(stringified_keys, vals_with_timestamps))
json_pairs = json.dumps(pairs)
self.cache_path.write_text(json_pairs, encoding="UTF8")

Expand Down
2 changes: 1 addition & 1 deletion src/components/render_dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@ def _generate_dashboard(self):
], className="legend-entry"),
], className="legend"),
html.Div([
html.Img(src=self._asset_path("scout-logo-purple-stack.png"), className="logo-image"),
html.Img(src=self._asset_path("scout-logo-stack.svg"), className="logo-image"),
html.Span(f"{self.report_location}", className="logo-text")
], className="logo")

Expand Down
17 changes: 9 additions & 8 deletions src/create_dashbord_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ def __init__(self, app: dash.Dash = None, session_id: bool = False, cache: cache
@staticmethod
def run_thread(lambda_func):
threading.Thread(target=lambda_func).start()
threading.Thread(target=lambda_func).start()


class ReportsParser(ReportBase):
Expand Down Expand Up @@ -85,21 +86,21 @@ def create_query_string(self, title: str, valid_disclosures: float) -> dict:
def get_parsed_values(self) -> dict:
self.logger.info("Reading main sheets")

def get_processed_workbooks_values():
def get_processed_workbooks_values() -> list:
processed = self.cache.get_dict_from_partial("session_cache", self.session_id, "processed_workbooks") or {}
return processed.values()

self.logger.info(f"Processed workbook vals: {get_processed_workbooks_values()}")
return list(processed.values())

i = 0
while not get_processed_workbooks_values() and len(get_processed_workbooks_values()) < 1 and i < 90 / 0.25:
processed_wbs = get_processed_workbooks_values()
while (not all(processed_wbs) or len(processed_wbs) == 0) and i < 90 / 0.25:
self.logger.info(f"Processed workbook vals: {processed_wbs}")
time.sleep(0.25)
i += 1
self.logger.info(f"Processed workbook vals: {get_processed_workbooks_values()}")
processed_wbs = get_processed_workbooks_values()
self.logger.info("ALL PROCESSED!")

reports_paths = {}
for paths_dict in [self.cache.get_dict_from_partial("b64_cache", code) for code in get_processed_workbooks_values()]:
for paths_dict in [self.cache.get_dict_from_partial("b64_cache", code) for code in processed_wbs]:
reports_paths = {**reports_paths, **paths_dict}
self.logger.info("Reports Paths:")
self.logger.info(reports_paths)
Expand Down Expand Up @@ -249,7 +250,7 @@ def save_trends(trend_props: dict):
trends = pd.DataFrame(columns=['Location', 'Include Descendents', 'Date', 'JSON'])

trend_props["JSON"] = json.dumps(trend_props["JSON"])
idx_cols = trend_props.copy().pop("JSON").keys()
idx_cols = [k for k in trend_props if k != "JSON"]
new_trends = trends.append(trend_props, ignore_index=True).drop_duplicates(subset=idx_cols) # can't drop_duplicates with non-hashable
if not trends.equals(new_trends): # Only save if changed
new_trends.to_feather(trends_path)
Expand Down

0 comments on commit 3563c57

Please sign in to comment.