Commit a076f5b

Syntax highlight and hyperlink crash functions + agent sections (#1139)
This PR adds syntax highlighting to the stack traces and turns file paths into clickable links that point directly to the source code on GitHub using the `get_project_repository` method in [`oss_fuzz_checkout.py`](https://github.com/google/oss-fuzz-gen/blob/46693b387e7e32c38a2206692bf4440a5b86c072/experiment/oss_fuzz_checkout.py#L183).

## Preview

![image](https://github.com/user-attachments/assets/9dc75773-c53c-4b0c-a063-fe6d31a3aec8)
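The linking approach described above could look roughly like the sketch below. It is illustrative only: the `source_link` helper, the commit hash, and the example frame are made up, and `repo_url` merely stands in for whatever `get_project_repository` returns for a project.

```python
# Hypothetical sketch, not the code in this commit: build a GitHub blob URL
# that jumps to the file and line named in a crash stack frame.
def source_link(repo_url: str, commit: str, file_path: str, line: int) -> str:
  """Turns a crash-frame location into a clickable GitHub source link."""
  repo_url = repo_url.rstrip('/').removesuffix('.git')  # Python 3.9+
  return f'{repo_url}/blob/{commit}/{file_path}#L{line}'


# Example frame values (made up).
print(source_link('https://github.com/example/libfoo', 'a1b2c3d',
                  'src/parser.c', 128))
# -> https://github.com/example/libfoo/blob/a1b2c3d/src/parser.c#L128
```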
1 parent 65308d6 commit a076f5b

File tree: 14 files changed (+590, -139 lines)

experiment/builder_runner.py

Lines changed: 39 additions & 12 deletions
@@ -1032,23 +1032,50 @@ def build_and_run_cloud(
      if blob.exists():
        blob.download_to_file(f)

+    code_coverage_report_dir = self.work_dirs.code_coverage_report(
+        generated_target_name)
+    report_prefix = f'{coverage_name}/report/linux/'
+    blobs = bucket.list_blobs(prefix=report_prefix)
+
+    for blob in blobs:
+      if blob.name.endswith('/'):
+        continue
+
+      # Get the relative path within report/linux/
+      relative_path = blob.name[len(report_prefix):]
+      local_path = os.path.join(code_coverage_report_dir, 'report', 'linux',
+                                relative_path)
+      os.makedirs(os.path.dirname(local_path), exist_ok=True)
+
+      blob.download_to_filename(local_path)
+
    run_result = RunResult(corpus_path=corpus_path,
                           coverage_report_path=coverage_path,
                           reproducer_path=reproducer_path,
                           log_path=run_log_path)

-    blob = bucket.blob(f'{coverage_name}/report/linux/summary.json')
-    if blob.exists():
-      # Download summary.json to our workdir.
-      cov_summary_folder = os.path.join(
-          self.work_dirs.code_coverage_report(generated_target_name),
-          'report/linux/')
-      os.makedirs(cov_summary_folder, exist_ok=True)
-      coverage_summary_file = os.path.join(cov_summary_folder, 'summary.json')
-      with open(coverage_summary_file, 'wb') as f:
-        blob.download_to_file(f)
-
-      # Load the coverage summary
+    # blob = bucket.blob(f'{coverage_name}/report/linux/summary.json')
+    # if blob.exists():
+    #   # Download summary.json to our workdir.
+    #   cov_summary_folder = os.path.join(
+    #       self.work_dirs.code_coverage_report(generated_target_name),
+    #       'report/linux/')
+    #   os.makedirs(cov_summary_folder, exist_ok=True)
+    #   coverage_summary_file = os.path.join(cov_summary_folder, 'summary.json')
+    #   with open(coverage_summary_file, 'wb') as f:
+    #     blob.download_to_file(f)
+
+    #   # Load the coverage summary
+    #   with open(coverage_summary_file, 'r') as f:
+    #     run_result.coverage_summary = json.load(f)
+
+    # summary.json is already downloaded as part of the bulk download above
+    coverage_summary_file = os.path.join(
+        self.work_dirs.code_coverage_report(generated_target_name),
+        'report/linux/summary.json')
+
+    # Load the coverage summary if it exists
+    if os.path.exists(coverage_summary_file):
      with open(coverage_summary_file, 'r') as f:
        run_result.coverage_summary = json.load(f)
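For context, the prefix-based bulk download added above, reduced to a standalone sketch. The bucket name, prefix, and output directory are placeholders; the `google-cloud-storage` calls (`list_blobs(prefix=...)`, `download_to_filename(...)`) are the same ones the new code relies on.

```python
# Standalone sketch of the bulk-download pattern above (placeholder names;
# requires the google-cloud-storage package and valid credentials).
import os

from google.cloud import storage


def download_prefix(bucket_name: str, prefix: str, local_dir: str) -> None:
  """Mirrors every object under `prefix` into `local_dir`."""
  bucket = storage.Client().bucket(bucket_name)
  for blob in bucket.list_blobs(prefix=prefix):
    if blob.name.endswith('/'):  # Skip "directory" placeholder objects.
      continue
    local_path = os.path.join(local_dir, blob.name[len(prefix):])
    os.makedirs(os.path.dirname(local_path), exist_ok=True)
    blob.download_to_filename(local_path)
```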

logger.py

Lines changed: 2 additions & 2 deletions
@@ -67,15 +67,15 @@ def write_result(self,
    with open(os.path.join(trial_result_dir, FINAL_RESULT_JSON), 'w') as f:
      json.dump(result.to_dict() | {'finished': finished}, f)

-  def write_chat_history(self, result: Result) -> None:
+  def write_chat_history(self, result: Result, cycle_count: int) -> None:
    """Writes chat history."""
    # TODO(dongge): Find a proper way to write this.
    trial_result_dir = os.path.join(result.work_dirs.status,
                                    f'{result.trial:02d}')
    os.makedirs(trial_result_dir, exist_ok=True)
    chat_history_path = os.path.join(trial_result_dir, 'log.txt')
    chat_history = '\n'.join(
-        f'\n\n\n************************{agent_name}************************\n'
+        f'\n\n\n************************{agent_name} (Cycle {cycle_count})************************\n'
        f'{chat_history}\n'
        for agent_name, chat_history in result.chat_history.items())
    self.write_to_file(chat_history_path, chat_history)
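In isolation, the effect of the new `cycle_count` argument on the section banners looks like this (agent names and history text are made up):

```python
# Illustrative only: reproduces the banner format from the updated f-string.
chat_history = {'Prototyper': '<chat 1>', 'Enhancer': '<chat 2>'}
cycle_count = 2

log_text = '\n'.join(
    f'\n\n\n************************{agent_name} (Cycle {cycle_count})************************\n'
    f'{history}\n' for agent_name, history in chat_history.items())

print(log_text)
# Each section header now reads e.g.:
# ************************Prototyper (Cycle 2)************************
```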

pipeline.py

Lines changed: 6 additions & 3 deletions
@@ -112,7 +112,8 @@ def _execute_one_cycle(self, result_history: list[Result],
    # and if it fails then we will return from this cycle and terminate
    # the pipeline.
    result_history.append(
-        self.writing_stage.execute(result_history=result_history))
+        self.writing_stage.execute(result_history=result_history,
+                                   cycle_count=cycle_count))
    self._update_status(result_history=result_history)
    if (not isinstance(result_history[-1], BuildResult) or
        not result_history[-1].success):
@@ -124,7 +125,8 @@ def _execute_one_cycle(self, result_history: list[Result],
    # and if it fails then we will return from this cycle and terminate
    # the pipeline.
    result_history.append(
-        self.execution_stage.execute(result_history=result_history))
+        self.execution_stage.execute(result_history=result_history,
+                                     cycle_count=cycle_count))
    self._update_status(result_history=result_history)
    if (not isinstance(result_history[-1], RunResult) or
        not result_history[-1].log_path):
@@ -137,7 +139,8 @@ def _execute_one_cycle(self, result_history: list[Result],
    # pipeline and retry making a harness. If the analysis stage is successful,
    # then we will terminate the pipeline.
    result_history.append(
-        self.analysis_stage.execute(result_history=result_history))
+        self.analysis_stage.execute(result_history=result_history,
+                                    cycle_count=cycle_count))
    # TODO(maoyi): add the indicator for the success of analysis stage
    if not isinstance(result_history[-1], AnalysisResult):
      self.logger.warning(

report/export.py

Lines changed: 16 additions & 5 deletions
@@ -18,6 +18,7 @@
from abc import abstractmethod

from report.common import Results
+from report.parse_logs import RunLogsParser


class BaseExporter:
@@ -42,6 +43,10 @@ def _get_full_url(self, relative_path: str) -> str:
      return relative_path
    return f"{self._base_url}/{relative_path}"

+  def get_url_path(self) -> str:
+    """Get the URL path to the CSV file."""
+    return os.path.join(self._output_dir, 'crashes.csv')
+

class CSVExporter(BaseExporter):
  """Export a report to CSV."""
@@ -65,9 +70,19 @@ def generate(self):

      project_name = benchmark_id.split("-")[1]

+      project_name = benchmark_id.split("-")[1]
+
      for sample in samples:
+        run_logs = self._results.get_run_logs(benchmark_id, sample.id) or ""
+        parser = RunLogsParser(run_logs, benchmark_id, sample.id)
+        crash_reproduction_path = parser.get_crash_reproduction_path()
+
        report_url = self._get_full_url(
            f"sample/{benchmark_id}/{sample.id}.html")
+        reproducer_path = self._get_full_url(
+            f'results/{benchmark_id}/artifacts/{sample.id}.fuzz_target-F0-01/'
+            f'{crash_reproduction_path}') if crash_reproduction_path else ""
+
        writer.writerow({
            "Project":
                project_name,
@@ -87,12 +102,8 @@ def generate(self):
            "Line Coverage Diff":
                sample.result.line_coverage_diff,
            "Reproducer Path":
-                sample.result.reproducer_path
+                reproducer_path
        })

    logging.info("Created CSV file at %s", csv_path)
    return csv_path
-
-  def get_url_path(self):
-    """Get the URL path to the CSV file."""
-    return os.path.join(self._output_dir, 'crashes.csv')
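The new "Reproducer Path" column combines the crash reproduction path parsed from the run logs with the report's base URL. A small sketch of the resulting shape, with placeholder values; only the `results/<benchmark>/artifacts/<sample>.fuzz_target-F0-01/` layout comes from the diff above.

```python
# Placeholder values; mirrors the URL construction added in report/export.py.
base_url = 'https://example.com/report-123'     # what _get_full_url prepends
benchmark_id = 'output-libfoo-foo_parse'        # made-up benchmark ID
sample_id = '01'                                # made-up sample ID
crash_reproduction_path = 'crash-abc123'        # as parsed by RunLogsParser

reproducer_path = (f'{base_url}/results/{benchmark_id}/artifacts/'
                   f'{sample_id}.fuzz_target-F0-01/{crash_reproduction_path}'
                  ) if crash_reproduction_path else ''
print(reproducer_path)
# -> https://example.com/report-123/results/output-libfoo-foo_parse/artifacts/01.fuzz_target-F0-01/crash-abc123
```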
