
feat: No-Scan SOC for binary checker pipeline #5122

Merged
merged 2 commits on Jun 9, 2025
11 changes: 10 additions & 1 deletion cve_bin_tool/cli.py
@@ -353,6 +353,9 @@ def main(argv=None):
help="strip scan directory from sbom evidence location paths and CVE paths (useful with a firmware dump)",
default=False,
)
output_group.add_argument(
"--no-scan", action="store_true", help="No-Scan Mode", default=False
)
vex_output_group = parser.add_argument_group(
"Vex Output", "Arguments related to Vex output document."
)
@@ -1121,6 +1124,7 @@ def main(argv=None):
error_mode=error_mode,
validate=not args["disable_validation_check"],
sources=enabled_sources,
no_scan=args["no_scan"],
)
version_scanner.remove_skiplist(skips)
LOGGER.info(f"Number of checkers: {version_scanner.number_of_checkers()}")
@@ -1137,19 +1141,24 @@
for scan_info in version_scanner.recursive_scan(args["directory"]):
if scan_info:
product_info, path = scan_info
LOGGER.debug(f"{product_info}: {path}")
LOGGER.debug(f"Product Info: {product_info}, Path: {path}")
# add product_info to parsed_data to check for with vex file
if product_info in parsed_data:
# update the paths in triage_data with the new path
triage_data = parsed_data[product_info]
LOGGER.debug("Product info in parsed data")
LOGGER.debug(f"Triage Data: {triage_data}")
triage_data["paths"].add(path)
else:
# create a new entry if product_info not in parsed_data
LOGGER.debug("Product info not in parsed data")
triage_data = {"default": {}, "paths": {path}}
LOGGER.debug(f"Triage Data: {triage_data}")
parsed_data[product_info] = triage_data

cve_scanner.get_cves(product_info, triage_data)
total_files = version_scanner.total_scanned_files
LOGGER.info(f"Total files: {total_files}")

if args["merge"]:
cve_scanner = merge_cve_scanner
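
For context, a minimal sketch of how the new flag surfaces through argparse. This is a standalone parser built only for illustration (the "cve-bin-tool" prog name and the direct parse_args call are assumptions, not the project's full CLI wiring); the add_argument call itself mirrors the diff above:

    import argparse

    # Illustrative stand-in for the output_group parser in cli.py
    parser = argparse.ArgumentParser(prog="cve-bin-tool")
    parser.add_argument(
        "--no-scan", action="store_true", help="No-Scan Mode", default=False
    )

    args = vars(parser.parse_args(["--no-scan"]))
    print(args["no_scan"])  # True; cli.py forwards this as no_scan=args["no_scan"]
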
13 changes: 12 additions & 1 deletion cve_bin_tool/version_scanner.py
@@ -44,7 +44,9 @@ def __init__(
score: int = 0,
validate: bool = True,
sources=None,
no_scan=False,
):
self.no_scan = no_scan
self.logger = logger or LOGGER.getChild(self.__class__.__name__)
# Update egg if installed in development mode
if IS_DEVELOP():
@@ -68,7 +70,7 @@ def __init__(
self.error_mode = error_mode
self.cve_db = CVEDB(sources=sources)
self.validate = validate
# self.logger.info("Checkers loaded: %s" % (", ".join(self.checkers.keys())))
self.logger.info("Checkers loaded: %s" % (", ".join(self.checkers.keys())))
self.language_checkers = valid_files
self.language_checkers_names = self.available_language_checkers()

@@ -260,6 +262,10 @@ def scan_file(self, filename: str) -> Iterator[ScanInfo]:
# parse binary file's strings
lines = parse_strings(filename)

if self.no_scan:
yield from self.run_checkers(filename, lines)
return

if output:
valid_file = False
for file in list(self.language_checkers.keys()):
@@ -334,6 +340,11 @@ def scan_and_or_extract_file(

def recursive_scan(self, scan_path: str) -> Iterator[ScanInfo]:
"""Recursively scan files and directories, extracting information, and yielding the results using a generator."""
if self.no_scan:
LOGGER.info("No Scan Mode: No CVE Scanning")
LOGGER.info(
"Currently the No Scan Mode is being built, and we will release a beta version soon"
)
with Extractor(logger=self.logger, error_mode=self.error_mode) as ectx:
if Path(scan_path).is_dir():
for filepath in self.walker([scan_path]):
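
For readers skimming the diff, here is a simplified, self-contained sketch of the short-circuit that no_scan introduces in scan_file. The class and method bodies below are stubs written only to show the control flow (MiniScanner and its hard-coded yield are hypothetical, not the real VersionScanner); only the if self.no_scan branch mirrors the change above:

    from typing import Iterator

    class MiniScanner:
        """Illustrative stub: mirrors only the no_scan branch of scan_file."""

        def __init__(self, no_scan: bool = False):
            self.no_scan = no_scan

        def run_checkers(self, filename: str, lines: str) -> Iterator[str]:
            # Stub standing in for the real checker dispatch.
            yield f"checker results for {filename}"

        def scan_file(self, filename: str, lines: str) -> Iterator[str]:
            if self.no_scan:
                # No-Scan Mode: go straight to the checkers and skip the
                # language-checker handling that normally follows.
                yield from self.run_checkers(filename, lines)
                return
            # ... full scan path (language checkers, extraction) runs here ...

    print(list(MiniScanner(no_scan=True).scan_file("/bin/example", "strings output")))
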