import json
import logging
import textwrap
+from datetime import datetime

from dojo.models import Finding

@@ -24,6 +25,71 @@ def parse_issue(self, row, test):
        data_cvss = row.get("CVSS", "")
        data_description = row.get("Description", "")

+        # Parse timestamp information (Item 4)
+        published_date = row.get("Published", "")
+        discovered_date = row.get("Discovered", "")
+        finding_date = None
+
+        # Use the Published date as primary, fall back to Discovered
+        date_str = published_date or discovered_date
+        if date_str:
+            try:
+                # Handle format like "2020-09-04 00:15:00.000"
+                finding_date = datetime.strptime(date_str.split(".")[0], "%Y-%m-%d %H:%M:%S").date()
+            except ValueError:
+                try:
+                    # Handle alternative formats
+                    finding_date = datetime.strptime(date_str[:10], "%Y-%m-%d").date()
+                except ValueError:
+                    logger.warning(f"Could not parse date: {date_str}")
+
+        # Build container/image metadata for the impact field (Item 3)
+        impact_parts = []
+
+        # Registry and repository information can change between scans, so it goes into the
+        # impact field; the description field is sometimes used for hash code calculation.
+        registry = row.get("Registry", "")
+        repository = row.get("Repository", "")
+        tag = row.get("Tag", "")
+        image_id = row.get("Id", "")
+        distro = row.get("Distro", "")
+
+        if registry:
+            impact_parts.append(f"Registry: {registry}")
+        if repository:
+            impact_parts.append(f"Repository: {repository}")
+        if tag:
+            impact_parts.append(f"Tag: {tag}")
+        if image_id:
+            impact_parts.append(f"Image ID: {image_id}")
+        if distro:
+            impact_parts.append(f"Distribution: {distro}")
+
+        # Host and container information
+        hosts = row.get("Hosts", "")
+        containers = row.get("Containers", "")
+        clusters = row.get("Clusters", "")
+        binaries = row.get("Binaries", "")
+        custom_labels = row.get("Custom Labels", "")
+
+        if hosts:
+            impact_parts.append(f"Hosts: {hosts}")
+        if containers:
+            impact_parts.append(f"Containers: {containers}")
+        if clusters:
+            impact_parts.append(f"Clusters: {clusters}")
+        if binaries:
+            impact_parts.append(f"Binaries: {binaries}")
+        if custom_labels:
+            impact_parts.append(f"Custom Labels: {custom_labels}")
+
+        # Add timestamp information to impact
+        if published_date:
+            impact_parts.append(f"Published: {published_date}")
+        if discovered_date:
+            impact_parts.append(f"Discovered: {discovered_date}")
+
+        impact_text = "\n".join(impact_parts) if impact_parts else data_severity
+
        if data_vulnerability_id and data_package_name:
            title = (
                data_vulnerability_id
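A minimal standalone sketch of the date fallback above, assuming the CSV "Published"/"Discovered" columns carry either the full "2020-09-04 00:15:00.000" timestamp or a bare "YYYY-MM-DD" date (the helper name is illustrative, not part of the parser):

from datetime import datetime

def parse_twistlock_date(date_str):
    # Illustrative helper mirroring the two-step fallback in parse_issue above
    try:
        # Full timestamp, e.g. "2020-09-04 00:15:00.000" (fractional seconds dropped)
        return datetime.strptime(date_str.split(".")[0], "%Y-%m-%d %H:%M:%S").date()
    except ValueError:
        # Bare date prefix, e.g. "2020-09-04"
        return datetime.strptime(date_str[:10], "%Y-%m-%d").date()

print(parse_twistlock_date("2020-09-04 00:15:00.000"))  # 2020-09-04
print(parse_twistlock_date("2020-09-04"))               # 2020-09-04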
@@ -40,6 +106,7 @@ def parse_issue(self, row, test):
        finding = Finding(
            title=textwrap.shorten(title, width=255, placeholder="..."),
            test=test,
+            date=finding_date,
            severity=convert_severity(data_severity),
            description=data_description
            + "<p> Vulnerable Package: "
@@ -52,12 +119,8 @@ def parse_issue(self, row, test):
                data_package_name, width=200, placeholder="...",
            ),
            component_version=data_package_version,
-            false_p=False,
-            duplicate=False,
-            out_of_scope=False,
-            mitigated=None,
            severity_justification=f"(CVSS v3 base score: {data_cvss})",
-            impact=data_severity,
+            impact=impact_text,
        )
        finding.description = finding.description.strip()
        if data_vulnerability_id:
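For illustration, a hypothetical CSV row with only registry, repository, and tag populated would produce an impact value like the one below (the row values are made up); the plain severity string is only used when no metadata is present:

row = {"Registry": "registry.example.com", "Repository": "team/app", "Tag": "1.2.3"}  # hypothetical values

impact_parts = []
if row.get("Registry"):
    impact_parts.append(f"Registry: {row['Registry']}")
if row.get("Repository"):
    impact_parts.append(f"Repository: {row['Repository']}")
if row.get("Tag"):
    impact_parts.append(f"Tag: {row['Tag']}")

print("\n".join(impact_parts))
# Registry: registry.example.com
# Repository: team/app
# Tag: 1.2.3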
@@ -116,19 +179,53 @@ def parse_json(self, json_output):
    def get_items(self, tree, test):
        items = {}
        if "results" in tree:
-            vulnerabilityTree = tree["results"][0].get("vulnerabilities", [])
+            # Extract image metadata for the impact field (Item 3)
+            result = tree["results"][0]
+            image_metadata = self.build_image_metadata(result)
+
+            # Parse vulnerabilities
+            vulnerabilityTree = result.get("vulnerabilities", [])
            for node in vulnerabilityTree:
-                item = get_item(node, test)
+                item = get_item(node, test, image_metadata)
                unique_key = node["id"] + str(
                    node["packageName"]
                    + str(node["packageVersion"])
                    + str(node["severity"]),
                )
                items[unique_key] = item
+
+            # Parse compliance findings
+            complianceTree = result.get("compliances", [])
+            for node in complianceTree:
+                item = get_compliance_item(node, test, image_metadata)
+                # Create a unique key for compliance findings - prefer the ID if available
+                if node.get("id"):
+                    unique_key = f"compliance_{node['id']}"
+                else:
+                    # Fall back to a hash of title + description
+                    unique_key = "compliance_" + hashlib.md5(
+                        (node.get("title", "") + node.get("description", "")).encode("utf-8"),
+                        usedforsecurity=False,
+                    ).hexdigest()
+                items[unique_key] = item
        return list(items.values())

+    def build_image_metadata(self, result):
+        """Build an image metadata string for the impact field."""
+        metadata_parts = []
+
+        image_id = result.get("id", "")
+        distro = result.get("distro", "")
+
+        if image_id:
+            metadata_parts.append(f"Image ID: {image_id}")
+        if distro:
+            metadata_parts.append(f"Distribution: {distro}")

-def get_item(vulnerability, test):
+        return "\n".join(metadata_parts)
+
+
+def get_item(vulnerability, test, image_metadata=""):
    severity = (
        convert_severity(vulnerability["severity"])
        if "severity" in vulnerability
@@ -147,6 +244,12 @@ def get_item(vulnerability, test):
        vulnerability.get("riskFactors", "No risk factors.")
    )

+    # Combine severity and image metadata in the impact field; the metadata can change
+    # between scans, and the description field is sometimes used for hash code calculation.
+    impact_parts = [severity]
+    if image_metadata:
+        impact_parts.append(image_metadata)
+    impact_text = "\n".join(impact_parts)
+
    # create the finding object
    finding = Finding(
        title=vulnerability.get("id", "Unknown Vulnerability")
@@ -166,19 +269,71 @@ def get_item(vulnerability, test):
        references=vulnerability.get("link"),
        component_name=vulnerability.get("packageName", ""),
        component_version=vulnerability.get("packageVersion", ""),
-        false_p=False,
-        duplicate=False,
-        out_of_scope=False,
-        mitigated=None,
        severity_justification=f"{vector} (CVSS v3 base score: {cvss})\n\n{riskFactors}",
-        impact=severity,
+        cvssv3_score=cvss,
+        impact=impact_text,
    )
    finding.unsaved_vulnerability_ids = [vulnerability["id"]] if "id" in vulnerability else None
    finding.description = finding.description.strip()

    return finding


+def get_compliance_item(compliance, test, image_metadata=""):
+    """Create a Finding object for compliance issues."""
+    severity = (
+        convert_severity(compliance["severity"])
+        if "severity" in compliance
+        else "Info"
+    )
+
+    title = compliance.get("title", "Unknown Compliance Issue")
+    description = compliance.get("description", "No description specified")
+    compliance_id = compliance.get("id", "")
+    category = compliance.get("category", "")
+    layer_time = compliance.get("layerTime", "")
+
+    # Build a comprehensive description
+    desc_parts = [f"<p><strong>Compliance Issue:</strong> {title}</p>"]
+
+    if compliance_id:
+        desc_parts.append(f"<p><strong>Compliance ID:</strong> {compliance_id}</p>")
+
+    if category:
+        desc_parts.append(f"<p><strong>Category:</strong> {category}</p>")
+
+    desc_parts.append(f"<p><strong>Description:</strong> {description}</p>")
+
+    if layer_time:
+        desc_parts.append(f"Layer Time: {layer_time}")
+
+    # Build the impact field combining severity and image metadata
+    impact_parts = [severity]
+    if image_metadata:
+        impact_parts.append(image_metadata)
+    impact_text = "\n".join(impact_parts)
+
+    # create the finding object for compliance
+    finding = Finding(
+        title=f"Compliance: {title}",
+        test=test,
+        severity=severity,
+        description="".join(desc_parts),
+        mitigation="Review and address the compliance issue as described in the description.",
+        severity_justification=f"Compliance severity: {severity}",
+        impact=impact_text,
+        vuln_id_from_tool=str(compliance_id) if compliance_id else None,
+    )
+    finding.description = finding.description.strip()
+
+    # Add compliance-specific tags
+    tags = ["compliance"]
+    if category:
+        tags.append(category.lower())
+    finding.unsaved_tags = tags
+
+    return finding
+
+
def convert_severity(severity):
    if severity.lower() == "important":
        return "High"