Commit b0c46fa

chore: improve docs and console logging (#1209)
This PR enhances console logging by adding an Error Logs Panel that lets the user know whether any errors occurred while running the commands.

Signed-off-by: Demolus13 <parth.govale@oracle.com>
1 parent 384303c commit b0c46fa

File tree

13 files changed: +65 -55 lines changed

src/macaron/console.py

Lines changed: 24 additions & 14 deletions
@@ -37,6 +37,7 @@ def __init__(self, *args: Any, verbose: bool = False, **kwargs: Any) -> None:
         self.setLevel(logging.DEBUG)
         self.command = ""
         self.logs: list[str] = []
+        self.error_logs: list[str] = []
         self.description_table = Table(show_header=False, box=None)
         self.description_table_content: dict[str, str | Status] = {
             "Package URL:": Status("[green]Processing[/]"),
@@ -118,6 +119,7 @@ def emit(self, record: logging.LogRecord) -> None:

         if record.levelno >= logging.ERROR:
             self.logs.append(f"[red][ERROR][/red] {log_time} {msg}")
+            self.error_logs.append(f"[red][ERROR][/red] {log_time} {msg}")
         elif record.levelno >= logging.WARNING:
             self.logs.append(f"[yellow][WARNING][/yellow] {log_time} {msg}")
         else:
@@ -386,10 +388,17 @@ def make_layout(self) -> Group:
             A rich Group object containing the layout for the live console display.
         """
         layout: list[RenderableType] = []
+        if self.error_logs:
+            error_log_panel = Panel(
+                "\n".join(self.error_logs),
+                title="Error Logs",
+                title_align="left",
+                border_style="red",
+            )
+            layout = layout + [error_log_panel]
         if self.command == "analyze":
-            layout = layout + [Rule(" DESCRIPTION", align="left")]
             if self.description_table.row_count > 0:
-                layout = layout + ["", self.description_table]
+                layout = layout + [Rule(" DESCRIPTION", align="left"), "", self.description_table]
             if self.progress_table.row_count > 0:
                 layout = layout + ["", self.progress, "", self.progress_table]
             if self.failed_checks_table.row_count > 0:
@@ -418,25 +427,25 @@ def make_layout(self) -> Group:
                 ]
         elif self.command == "verify-policy":
             if self.policy_summary_table.row_count > 0:
-                if self.components_violates_table.row_count > 0:
+                if self.components_satisfy_table.row_count > 0:
                     layout = layout + [
-                        "[bold red] Components Violate Policy[/]",
-                        self.components_violates_table,
+                        "[bold green] Components Satisfy Policy[/]",
+                        self.components_satisfy_table,
                     ]
                 else:
                     layout = layout + [
-                        "[bold red] Components Violate Policy[/] [white not italic]None[/]",
+                        "[bold green] Components Satisfy Policy[/] [white not italic]None[/]",
                     ]
-                if self.components_satisfy_table.row_count > 0:
+                if self.components_violates_table.row_count > 0:
                     layout = layout + [
                         "",
-                        "[bold green] Components Satisfy Policy[/]",
-                        self.components_satisfy_table,
+                        "[bold red] Components Violate Policy[/]",
+                        self.components_violates_table,
                     ]
                 else:
                     layout = layout + [
                         "",
-                        "[bold green] Components Satisfy Policy[/] [white not italic]None[/]",
+                        "[bold red] Components Violate Policy[/] [white not italic]None[/]",
                     ]
                 layout = layout + ["", self.policy_summary_table]
             if self.verification_summary_attestation:
@@ -448,10 +457,11 @@ def make_layout(self) -> Group:
                     "[bold blue]Verification Summary Attestation[/]",
                     self.verification_summary_attestation,
                 )
-                vsa_table.add_row(
-                    "[bold blue]Decode and Inspect the Content[/]",
-                    f"cat {self.verification_summary_attestation} | jq -r [white]'.payload'[/] | base64 -d | jq",
-                )
+                if self.verification_summary_attestation != "No VSA generated.":
+                    vsa_table.add_row(
+                        "[bold blue]Decode and Inspect the Content[/]",
+                        f"cat {self.verification_summary_attestation} | jq -r [white]'.payload'[/] | base64 -d | jq",
+                    )

                 layout = layout + [vsa_table]
         elif self.command == "find-source":
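
The console.py hunks above implement the Error Logs Panel described in the commit message: ERROR-level records are mirrored into self.error_logs, and make_layout() renders them in a red-bordered panel at the top of the live display. For readers who want to try the pattern outside Macaron, here is a rough, self-contained sketch using the rich library; the names ErrorPanelHandler and render_error_panel are hypothetical, not Macaron's actual API.

import logging

from rich.console import Console, Group
from rich.panel import Panel


class ErrorPanelHandler(logging.Handler):
    """Hypothetical handler: mirror ERROR records into a list so they can be rendered later."""

    def __init__(self) -> None:
        super().__init__(level=logging.DEBUG)
        self.error_logs: list[str] = []

    def emit(self, record: logging.LogRecord) -> None:
        # Only ERROR and above end up in the panel; lower levels are ignored here.
        if record.levelno >= logging.ERROR:
            self.error_logs.append(f"[red][ERROR][/red] {self.format(record)}")


def render_error_panel(handler: ErrorPanelHandler) -> Group:
    """Return a renderable group that contains the error panel only when errors exist."""
    renderables: list = []
    if handler.error_logs:
        renderables.append(
            Panel(
                "\n".join(handler.error_logs),
                title="Error Logs",
                title_align="left",
                border_style="red",
            )
        )
    return Group(*renderables)


if __name__ == "__main__":
    handler = ErrorPanelHandler()
    logger = logging.getLogger("demo")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    logger.error("Something went wrong while running the command.")
    Console().print(render_error_panel(handler))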

src/macaron/dependency_analyzer/cyclonedx.py

Lines changed: 12 additions & 12 deletions
@@ -250,7 +250,7 @@ def add_latest_version(
             ):
                 latest_deps[key] = item
         except ValueError as error:
-            logger.error("Could not parse dependency version number: %s", error)
+            logger.debug("Could not parse dependency version number: %s", error)

     @staticmethod
     def to_configs(resolved_deps: dict[str, DependencyInfo]) -> list[Configuration]:
@@ -344,7 +344,7 @@ def resolve_dependencies(main_ctx: Any, sbom_path: str, recursive: bool = False)
             # We allow dependency analysis if SBOM is provided but no repository is found.
             dep_analyzer = build_tool.get_dep_analyzer()
         except DependencyAnalyzerError as error:
-            logger.error("Unable to find a dependency analyzer for %s: %s", build_tool.name, error)
+            logger.debug("Unable to find a dependency analyzer for %s: %s", build_tool.name, error)
             return {}

         if isinstance(dep_analyzer, NoneDependencyAnalyzer):
@@ -381,11 +381,11 @@ def resolve_dependencies(main_ctx: Any, sbom_path: str, recursive: bool = False)
                 log_file.write(analyzer_output.stdout.decode("utf-8"))

         except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as error:
-            logger.error(error)
+            logger.debug(error)
             with open(log_path, mode="a", encoding="utf-8") as log_file:
                 log_file.write(error.output.decode("utf-8"))
         except FileNotFoundError as error:
-            logger.error(error)
+            logger.debug(error)

         # We collect the generated SBOM as a best effort, even if the build exits with errors.
         # TODO: add improvements to help the SBOM build succeed as much as possible.
@@ -437,12 +437,12 @@ def get_root_component(self, root_bom_path: Path) -> CDXComponent | None:
         try:
             root_bom = deserialize_bom_json(root_bom_path)
         except CycloneDXParserError as error:
-            logger.error(error)
+            logger.debug(error)
             return None
         try:
             return root_bom.metadata.component
         except AttributeError as error:
-            logger.error(error)
+            logger.debug(error)

         return None

@@ -482,7 +482,7 @@ def _is_target_cmp(cmp: CDXComponent | None) -> bool:
         if _is_target_cmp(root_bom.metadata.component):
             return root_bom.metadata.component
         if root_bom.metadata.component:
-            logger.error(
+            logger.debug(
                 (
                     "The analysis target %s and the metadata component %s in the BOM file do not match."
                     " Please fix the PURL input and try again."
@@ -494,7 +494,7 @@ def _is_target_cmp(cmp: CDXComponent | None) -> bool:
             )
             return None

-        logger.error(
+        logger.debug(
             "Unable to find the analysis target %s in the BOM file. Please fix the PURL input and try again.",
             target_component.purl,
         )
@@ -528,11 +528,11 @@ def get_dep_components(
         try:
             root_bom = deserialize_bom_json(root_bom_path)
         except CycloneDXParserError as error:
-            logger.error(error)
+            logger.debug(error)
             return

         if root_bom.components is None:
-            logger.error("The BOM file at %s misses components.", str(root_bom_path))
+            logger.debug("The BOM file at %s misses components.", str(root_bom_path))
             return

         dependencies: list[CDXDependency] = []
@@ -559,7 +559,7 @@ def get_dep_components(
             try:
                 child_bom_objects.append(deserialize_bom_json(child_path))
             except CycloneDXParserError as error:
-                logger.error(error)
+                logger.debug(error)
                 continue

         for bom in child_bom_objects:
@@ -663,7 +663,7 @@ def convert_components_to_artifacts(
             with open(os.path.join(global_config.output_path, "sbom_debug.json"), "w", encoding="utf8") as debug_file:
                 debug_file.write(json.dumps(all_versions, indent=4))
         except OSError as error:
-            logger.error(error)
+            logger.debug(error)

         return latest_deps
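
The hunks above, and the similar logger.error-to-logger.debug changes in the files that follow, demote messages for handled, best-effort failures so they no longer surface in the console (and the new Error Logs Panel) but remain available in the debug log. A minimal sketch of the two-handler logging setup this kind of split assumes; the logger name, handler choices, and "macaron_debug.log" path are illustrative, not Macaron's actual configuration.

import logging

logger = logging.getLogger("demo.dependency_analyzer")
logger.setLevel(logging.DEBUG)

# Console handler: only WARNING and above reach the user (and an error panel, if any).
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.WARNING)

# File handler: everything, including the demoted debug messages, is kept for troubleshooting.
file_handler = logging.FileHandler("macaron_debug.log")  # hypothetical path
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s"))

logger.addHandler(console_handler)
logger.addHandler(file_handler)

# A handled, best-effort failure is now logged at debug level only:
logger.debug("Could not parse dependency version number: %s", "invalid version string")
# ...while a genuine, unrecoverable problem would still use logger.error().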

src/macaron/parsers/yaml/loader.py

Lines changed: 5 additions & 5 deletions
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2022 - 2025, Oracle and/or its affiliates. All rights reserved.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/.

 """This module contains the loader for YAML files."""
@@ -77,10 +77,10 @@ def validate_yaml_data(cls, schema: Schema, data: list) -> bool:
             yamale.validate(schema, data)
             return True
         except yamale.YamaleError as error:
-            logger.error("Yaml data validation failed.")
+            logger.debug("Yaml data validation failed.")
             for result in error.results:
                 for err_str in result.errors:
-                    logger.error("\t%s", err_str)
+                    logger.debug("\t%s", err_str)
             return False

     @classmethod
@@ -105,11 +105,11 @@ def load(cls, path: os.PathLike | str, schema: Schema = None) -> Any:
         logger.info("Loading yaml content for %s", path)
         loaded_data = YamlLoader._load_yaml_content(path=path)
         if not loaded_data:
-            logger.error("Error while loading the config yaml file %s.", path)
+            logger.debug("Error while loading the config yaml file %s.", path)
             return None

         if schema and not YamlLoader.validate_yaml_data(schema, loaded_data):
-            logger.error("The yaml content in %s is invalid according to the schema.", path)
+            logger.debug("The yaml content in %s is invalid according to the schema.", path)
             return None

         result = None

src/macaron/provenance/provenance_finder.py

Lines changed: 5 additions & 5 deletions
@@ -189,15 +189,15 @@ def find_npm_provenance(purl: PackageURL, registry: NPMRegistry) -> list[Provena
                # Load the provenance file (provenance attestation).
                provenance_payload = load_provenance_payload(download_path)
            except LoadIntotoAttestationError as error:
-               logger.error("Error while loading provenance attestation: %s", error)
+               logger.debug("Error while loading provenance attestation: %s", error)
                return []

            signed_download_path = f"{download_path}.signed"
            try:
                # Load the other npm provenance file (publish attestation).
                publish_payload = load_provenance_payload(signed_download_path)
            except LoadIntotoAttestationError as error:
-               logger.error("Error while loading publish attestation: %s", error)
+               logger.debug("Error while loading publish attestation: %s", error)
                return [ProvenanceAsset(provenance_payload, npm_provenance_asset.name, npm_provenance_asset.url)]

            return [
@@ -206,7 +206,7 @@ def find_npm_provenance(purl: PackageURL, registry: NPMRegistry) -> list[Provena
            ]

    except OSError as error:
-       logger.error("Error while storing provenance in the temporary directory: %s", error)
+       logger.debug("Error while storing provenance in the temporary directory: %s", error)
        return []


@@ -331,7 +331,7 @@ def find_pypi_provenance(purl: PackageURL) -> list[ProvenanceAsset]:
            payload.verified = verified
            return [ProvenanceAsset(payload, purl.name, url)]
        except LoadIntotoAttestationError as load_error:
-           logger.error("Error while loading provenance: %s", load_error)
+           logger.debug("Error while loading provenance: %s", load_error)
            return []


@@ -484,7 +484,7 @@ def download_provenances_from_ci_service(ci_info: CIInfo, download_path: str) ->
        try:
            payload = load_provenance_payload(provenance_filepath)
        except LoadIntotoAttestationError as error:
-           logger.error("Error logging provenance: %s", error)
+           logger.debug("Error logging provenance: %s", error)
            continue

        # Add the provenance file.

src/macaron/repo_finder/repo_finder.py

Lines changed: 5 additions & 5 deletions
@@ -372,11 +372,11 @@ def get_latest_purl_if_different(purl: PackageURL) -> PackageURL | None:

     latest_version_purl, _ = DepsDevRepoFinder.get_latest_version(no_version_purl)
     if not latest_version_purl:
-        logger.error("Latest version PURL could not be found.")
+        logger.debug("Latest version PURL could not be found.")
         return None

     if latest_version_purl == purl:
-        logger.error("Latest version PURL is the same as the current.")
+        logger.debug("Latest version PURL is the same as the current.")
         return None

     logger.debug("Found new version of PURL: %s", latest_version_purl)
@@ -400,11 +400,11 @@ def get_latest_repo_if_different(latest_version_purl: PackageURL, original_repo:
     """
     latest_repo, _ = find_repo(latest_version_purl, False)
     if not latest_repo:
-        logger.error("Could not find repository from latest PURL: %s", latest_version_purl)
+        logger.debug("Could not find repository from latest PURL: %s", latest_version_purl)
         return ""

     if check_repo_urls_are_equivalent(original_repo, latest_repo):
-        logger.error(
+        logger.debug(
             "Repository from latest PURL is equivalent to original repository: %s ~= %s",
             latest_repo,
             original_repo,
@@ -470,7 +470,7 @@ def prepare_repo(
         logger.info("The path to repo %s is a remote path.", repo_path)
         resolved_remote_path = get_remote_vcs_url(repo_path)
         if not resolved_remote_path:
-            logger.error("The provided path to repo %s is not a valid remote path.", repo_path)
+            logger.debug("The provided path to repo %s is not a valid remote path.", repo_path)
             return None, commit_finder_outcome

         git_service = get_git_service(resolved_remote_path)

src/macaron/slsa_analyzer/analyzer.py

Lines changed: 1 addition & 1 deletion
@@ -673,7 +673,7 @@ def add_repository(self, branch_name: str | None, git_obj: Git) -> Repository |
         # We only allow complete_name's length to be 2 or 3 because we need to construct PURL
         # strings using the complete_name, i.e., type/namespace/name@commitsha
         if (parts_len := len(Path(complete_name).parts)) < 2 or parts_len > 3:
-            logger.error("The repository path %s is not valid.", complete_name)
+            logger.debug("The repository path %s is not valid.", complete_name)
             return None

         repository = Repository(

src/macaron/slsa_analyzer/build_tool/poetry.py

Lines changed: 2 additions & 2 deletions
@@ -86,11 +86,11 @@ def is_detected(self, repo_path: str) -> bool:
                        if ("tool" in data) and ("poetry" in data["tool"]):
                            return True
                    except tomllib.TOMLDecodeError:
-                       logger.error("Failed to read the %s file: invalid toml file.", conf)
+                       logger.debug("Failed to read the %s file: invalid toml file.", conf)
                        return False
                    return False
            except FileNotFoundError:
-               logger.error("Failed to read the %s file.", conf)
+               logger.debug("Failed to read the %s file.", conf)
                return False

        return False

src/macaron/slsa_analyzer/checks/infer_artifact_pipeline_check.py

Lines changed: 1 addition & 1 deletion
@@ -160,7 +160,7 @@ def run_check(self, ctx: AnalyzeContext) -> CheckResultData:
         try:
             build_def = ProvenancePredicate.find_build_def(prov_payload.statement)
         except ProvenanceError as error:
-            logger.error(error)
+            logger.debug(error)
             return CheckResultData(result_tables=[], result_type=CheckResultType.FAILED)
         prov_workflow, prov_trigger_run = build_def.get_build_invocation(prov_payload.statement)

src/macaron/slsa_analyzer/ci_service/github_actions/analyzer.py

Lines changed: 1 addition & 1 deletion
@@ -398,7 +398,7 @@ def build_call_graph_from_path(root: BaseNode, workflow_path: str, repo_path: st
     try:
         parsed_obj: Workflow = parse_action(workflow_path)
     except ParseError as error:
-        logger.error("Unable to parse GitHub Actions at the target %s: %s", repo_path, error)
+        logger.debug("Unable to parse GitHub Actions at the target %s: %s", repo_path, error)
         raise ParseError from error

     # Add internal workflows.

src/macaron/slsa_analyzer/ci_service/github_actions/github_actions_ci.py

Lines changed: 4 additions & 4 deletions
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 - 2024, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2022 - 2025, Oracle and/or its affiliates. All rights reserved.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/.

 """This module analyzes GitHub Actions CI."""
@@ -186,13 +186,13 @@ def has_latest_run_passed(

         workflow_data = self.api_client.get_repo_workflow_data(repo_full_name, workflow)
         if not workflow_data:
-            logger.error("Cannot find data of workflow %s.", workflow)
+            logger.debug("Cannot find data of workflow %s.", workflow)
             return ""

         try:
             workflow_id = workflow_data["id"]
         except KeyError:
-            logger.error("Cannot get unique ID of workflow %s.", workflow)
+            logger.debug("Cannot get unique ID of workflow %s.", workflow)
             return ""

         logger.info("The unique ID of workflow %s is %s", workflow, workflow_id)
@@ -540,7 +540,7 @@ def search_for_workflow_run(
                    full_name, branch_name=branch_name, created_after=created_after, page=query_page
                )
            except KeyError:
-               logger.error("Error while reading run data. Skipping ...")
+               logger.debug("Error while reading run data. Skipping ...")
                continue

        return {}
