Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 19 additions & 13 deletions dojo/importers/default_reimporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -409,6 +409,7 @@ def process_findings(
finding = self.finding_post_processing(
finding,
unsaved_finding,
is_matched_finding=bool(matched_findings),
)
# all data is already saved on the finding, we only need to trigger post processing in batches
push_to_jira = self.push_to_jira and ((not self.findings_groups_enabled or not self.group_by) or not finding_will_be_grouped)
Expand Down Expand Up @@ -926,6 +927,8 @@ def finding_post_processing(
self,
finding: Finding,
finding_from_report: Finding,
*,
is_matched_finding: bool = False,
) -> Finding:
"""
Save all associated objects to the finding after it has been saved
Expand All @@ -940,19 +943,22 @@ def finding_post_processing(
self.endpoint_manager.chunk_endpoints_and_disperse(finding, finding_from_report.unsaved_endpoints)
if len(self.endpoints_to_add) > 0:
self.endpoint_manager.chunk_endpoints_and_disperse(finding, self.endpoints_to_add)
# Parsers shouldn't use the tags field; they should use unsaved_tags instead.
# Merge any tags set by the parser into unsaved_tags
tags_from_parser = finding_from_report.tags if isinstance(finding_from_report.tags, list) else []
unsaved_tags_from_parser = finding_from_report.unsaved_tags if isinstance(finding_from_report.unsaved_tags, list) else []
merged_tags = unsaved_tags_from_parser + tags_from_parser
if merged_tags:
finding_from_report.unsaved_tags = merged_tags
if finding_from_report.unsaved_tags:
cleaned_tags = clean_tags(finding_from_report.unsaved_tags)
if isinstance(cleaned_tags, list):
finding.tags.add(*cleaned_tags)
elif isinstance(cleaned_tags, str):
finding.tags.add(cleaned_tags)
# For matched/existing findings, do not update tags from the report,
# consistent with how other fields are handled on reimport.
if not is_matched_finding:
# Parsers shouldn't use the tags field; they should use unsaved_tags instead.
# Merge any tags set by the parser into unsaved_tags
tags_from_parser = finding_from_report.tags if isinstance(finding_from_report.tags, list) else []
unsaved_tags_from_parser = finding_from_report.unsaved_tags if isinstance(finding_from_report.unsaved_tags, list) else []
merged_tags = unsaved_tags_from_parser + tags_from_parser
if merged_tags:
finding_from_report.unsaved_tags = merged_tags
if finding_from_report.unsaved_tags:
cleaned_tags = clean_tags(finding_from_report.unsaved_tags)
if isinstance(cleaned_tags, list):
finding.tags.add(*cleaned_tags)
elif isinstance(cleaned_tags, str):
finding.tags.add(cleaned_tags)
# Process any files
if finding_from_report.unsaved_files:
finding.unsaved_files = finding_from_report.unsaved_files
Expand Down
6 changes: 3 additions & 3 deletions unittests/test_tags.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,12 +369,12 @@ def assert_tags_in_findings(findings: list[dict], expected_finding_count: int, d
findings = response["results"]
# Make sure we have what we are looking for
assert_tags_in_findings(findings, 2, ["security", "network"])
# Reimport with a different report that has more tags
# Reimport with a different report that has more tags — matched findings should retain their original tags
self.reimport_scan_with_params(test_id, self.generic_sample_with_more_tags_filename, scan_type="Generic Findings Import")
response = self.get_test_findings_api(test_id)
findings = response["results"]
# Make sure we have what we are looking for
assert_tags_in_findings(findings, 2, ["security", "network", "hardened"])
# Tags from the report are not applied to matched findings on reimport, consistent with other fields
assert_tags_in_findings(findings, 2, ["security", "network"])


@versioned_fixtures
Expand Down
Loading