Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
__pycache__
25 changes: 24 additions & 1 deletion classes/mend.bbclass
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@ MEND_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/mend/"

HOSTTOOLS += "java"

MEND_LATEST_LOG_NAME = "latest.json"

AUTO_PATCH = "true"

def mend_request(encoded_data):
import urllib.request

Expand Down Expand Up @@ -65,13 +69,15 @@ python mend_report_handler() {
response_json = json.loads(res)
timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
out_path = os.path.join(d.getVar('MEND_CHECK_SUMMARY_DIR'), "mend-report-%s.json" % (timestamp))
link_path = os.path.join(d.getVar('MEND_CHECK_SUMMARY_DIR'), d.getVar('MEND_LATEST_LOG_NAME'))

os.makedirs(d.getVar('MEND_CHECK_SUMMARY_DIR'), exist_ok=True)

with open(out_path, "w") as f:
json.dump(response_json, f, indent=2)
os.system(f"ln {out_path} {link_path}")
bb.note(f"Mend report succesfully generated at {out_path}")

bb.note(f"Latest report can also be accessed at {link_path}")
except Exception as err:
bb.warn(f"Generating Mend report failed. Details: {err}")
}
Expand Down Expand Up @@ -160,6 +166,23 @@ python do_mend_check() {
bb.note("Mend Unified Agent scan completed.")
}


python download_patches() {
    # Event handler (fired on bb.event.BuildCompleted, see addhandler below):
    # downloads fix patches for the vulnerabilities listed in the latest Mend
    # report. Gated by AUTO_PATCH; does nothing unless it is exactly "true".
    if d.getVar("AUTO_PATCH") != "true":
        return
    from patch_download import get_patches
    bb.note("Downloading patches...")
    # mend_report_handler hard-links the newest report to MEND_LATEST_LOG_NAME.
    report_path = os.path.join(d.getVar('MEND_CHECK_SUMMARY_DIR'), d.getVar('MEND_LATEST_LOG_NAME'))
    get_patches(report_path, "temp")
    bb.note("Patches downloaded and saved in 'temp' directory")
}

# Run download_patches once the whole build has finished (the Mend report is
# written by mend_report_handler, which also listens for build completion).
addhandler download_patches
download_patches[eventmask] = "bb.event.BuildCompleted"
# NOTE(review): [deptask] is a task flag used for inter-task dependencies;
# it presumably has no effect on an event handler — confirm whether this
# line is needed or whether ordering vs. mend_report_handler is guaranteed.
download_patches[deptask] = "mend_report_handler"

addtask mend_check after do_patch before do_build
do_mend_check[nostamp] = "1"
do_rootfs[recrdeptask] += "do_mend_check"
69 changes: 69 additions & 0 deletions lib/patch_download/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
import os
import json
from patch_download.methods import PATCH_DOWNLOAD_METHODS


def load_json(json_path: str) -> dict:
    '''
    Loads the vulnerability report produced by the Mend scan.

    Param json_path: path to the report json file.
    Return: the parsed report as a dictionary.
    Raises RuntimeError: when the file cannot be read (missing, unreadable, ...).
    '''
    try:
        with open(json_path, "r", encoding="utf-8") as fin:
            data = fin.read()
    except OSError as err:
        # Chain the original error so the real cause (ENOENT, EACCES, ...)
        # is not lost; the old message claimed "not found" for any OSError.
        raise RuntimeError(f"Could not read file {json_path}") from err
    # The previous data.replace("\u003d", "=") was a no-op: "\u003d" in Python
    # source *is* "=", and json.loads decodes \u003d escapes natively anyway.
    return json.loads(data)


def save_patch(file_name: str, patch: str, force: bool = False):
    '''
    Writes *patch* to *file_name* unless that file already exists.

    Param file_name: destination path (may include a directory component).
    Param patch: patch text to write.
    Param force: overwrite the file even if it already exists.
    '''
    # The original test `file_name not in os.listdir()` only compared against
    # basenames in the current working directory, so any path containing a
    # directory (as get_patches passes) was unconditionally rewritten.
    if force or not os.path.exists(file_name):
        with open(file_name, "w") as fout:
            fout.write(patch)


def download_all_patches(json: dict) -> dict[str, str]:
    '''
    Downloads all the patches referenced by the report and stores them in
    memory.

    Param json: parsed vulnerability report (as returned by load_json).
        NOTE: the parameter name shadows the stdlib json module; kept for
        backward compatibility with keyword callers — the module is not
        used inside this function.
    Return: a dictionary where the key is the CVE addressed and the value is
        the patch itself.
    '''
    patches = {}
    alerts = json["alerts"]
    total = len(alerts)  # hoisted: was recomputed on every iteration
    # start=1 so progress reads "1/N".."N/N" instead of "0/N".."N-1/N"
    for idx, alert in enumerate(alerts, start=1):
        vulnerability = alert["vulnerability"]
        name = vulnerability["name"]
        bb.note(f"{idx}/{total}) Found {name}")
        if "topFix" not in vulnerability:
            # Nothing to download for this alert; previously execution fell
            # through with url="None" and emitted a second, misleading
            # "Missing method" warning.
            bb.warn("This package has no top fix")
            continue
        url = vulnerability["topFix"]["url"]
        for check, download in PATCH_DOWNLOAD_METHODS.items():
            if check(url):
                bb.note(f"Retrieving patch for {name}...")
                patches[name] = download(url)
                break  # first matching method wins; don't download twice
        else:
            bb.warn("Missing method for retrieving fix to vulnerability")
    return patches


def get_patches(path: str, save_path: str):
    '''
    Main method to get called from external libraries. This method downloads
    all the patches it can and stores them in save_path.

    Param path: the path to the vulnerability json.
    Param save_path: directory where the .patch files are written (created if
        missing).
    '''
    data = load_json(path)
    patches = download_all_patches(data)
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(save_path, exist_ok=True)
    for patch_name, patch in patches.items():
        save_patch(os.path.join(save_path, f"{patch_name}.patch"), patch)

29 changes: 29 additions & 0 deletions lib/patch_download/github.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import time
import urllib.request
from urllib.error import HTTPError


def check_github(url: str) -> bool:
    '''
    Tells whether *url* looks like a GitHub commit page, i.e. a URL this
    module knows how to turn into a downloadable patch.
    '''
    required_markers = ("github.com", "/commit/")
    return all(marker in url for marker in required_markers)


def download_github_commit(url: str) -> str:
    '''
    Downloads the patch for a GitHub commit URL.

    GitHub serves the raw patch when ".patch" is appended to a commit URL.
    HTTP 429 (rate limit for unauthenticated users) is retried after a delay;
    any other HTTP error aborts immediately.

    Return: the patch text, or "" when it could not be downloaded.
    '''
    MAX_RETRIES = 10
    patch_url = url + ".patch"  # don't reassign the parameter in place
    for retry in range(1, MAX_RETRIES + 1):
        try:
            with urllib.request.urlopen(patch_url) as stream:
                return stream.read().decode("utf-8")
        except HTTPError as e:
            if e.code != 429:
                # Previously any other status (e.g. 404) was silently
                # swallowed and retried in a tight loop with no sleep;
                # a non-rate-limit error will not go away on retry.
                bb.warn(f"Downloading {patch_url} failed: {e}")
                return ""
            # GitHub uses a maximum number of requests per minute for not
            # authenticated users. This is a workaround to their solution.
            bb.warn(f"Retrying download... {retry}/{MAX_RETRIES}")
            time.sleep(60)
    return ""
12 changes: 12 additions & 0 deletions lib/patch_download/methods.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
from patch_download.github import check_github, download_github_commit


'''
This dictionary contains all the methods to download patches from various
sources. Once a source is added, the check function shall be introduced as a key
and the download function shall be introduced as the value.
For a short example, please see how the github.py file works.
'''
# Maps a check predicate (url -> bool) to its download function
# (url -> patch text). download_all_patches iterates these entries and
# invokes the download function of the first check that accepts the URL.
PATCH_DOWNLOAD_METHODS = {
    check_github: download_github_commit
}