Commit 096bd04

Merge pull request #10810 from DefectDojo/master-into-dev/2.37.3-2.38.0-dev
Release: Merge back 2.37.3 into dev from: master-into-dev/2.37.3-2.38.0-dev
2 parents a6efe61 + 2e6b9a4 commit 096bd04

19 files changed: +728, -41 lines changed

Dockerfile.integration-tests-debian

Lines changed: 11 additions & 2 deletions
@@ -25,8 +25,13 @@ RUN pip install --no-cache-dir selenium==4.9.0 requests
 
 # Install the latest Google Chrome stable release
 WORKDIR /opt/chrome
+
+# TODO: figure out whatever fix is necessary to use Chrome >= 128 and put this back in the RUN below so we stay
+# up-to-date
+# chrome_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chrome[] | select(.platform == "linux64").url') && \
+
 RUN \
-    chrome_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chrome[] | select(.platform == "linux64").url') && \
+    chrome_url="https://storage.googleapis.com/chrome-for-testing-public/127.0.6533.119/linux64/chrome-linux64.zip" && \
     wget $chrome_url && \
     unzip chrome-linux64.zip && \
     rm -rf chrome-linux64.zip && \
@@ -49,8 +54,12 @@ RUN apt-get install -y libxi6 libgconf-2-4 jq libjq1 libonig5 libxkbcommon0 libx
 
 # Installing the latest stable Google Chrome driver release
 WORKDIR /opt/chrome-driver
+# TODO: figure out whatever fix is necessary to use Chrome >= 128 and put this back in the RUN below so we stay
+# up-to-date
+# chromedriver_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chromedriver[] | select(.platform == "linux64").url') && \
+
 RUN \
-    chromedriver_url=$(curl https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json | jq -r '.channels[] | select(.channel == "Stable") | .downloads.chromedriver[] | select(.platform == "linux64").url') && \
+    chromedriver_url="https://storage.googleapis.com/chrome-for-testing-public/127.0.6533.119/linux64/chromedriver-linux64.zip" && \
     wget $chromedriver_url && \
     unzip -j chromedriver-linux64.zip chromedriver-linux64/chromedriver && \
     rm -rf chromedriver-linux64.zip && \
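These hunks pin Chrome and chromedriver to 127.0.6533.119 from chrome-for-testing-public instead of resolving the latest Stable build, per the Chrome >= 128 issue noted in the TODO. For when the dynamic lookup returns, a rough Python equivalent of the commented-out curl | jq pipeline is sketched below (standard library only; the JSON layout is inferred from the jq filter above, and the script is not part of this commit):

# Sketch: Python equivalent of the commented-out curl | jq lookup above.
# Assumes the JSON layout implied by that jq filter; not part of this commit.
import json
import urllib.request

METADATA_URL = (
    "https://googlechromelabs.github.io/chrome-for-testing/"
    "last-known-good-versions-with-downloads.json"
)


def stable_linux64_url(kind: str) -> str:
    """Return the linux64 Stable download URL for "chrome" or "chromedriver"."""
    with urllib.request.urlopen(METADATA_URL) as resp:
        data = json.load(resp)
    downloads = data["channels"]["Stable"]["downloads"][kind]
    return next(d["url"] for d in downloads if d["platform"] == "linux64")


if __name__ == "__main__":
    print(stable_linux64_url("chrome"))
    print(stable_linux64_url("chromedriver"))

Both downloads should come from the same version directory so the browser and driver stay compatible.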

docs/content/en/integrations/api-v2-docs.md

Lines changed: 1 addition & 0 deletions
@@ -47,6 +47,7 @@ For example: :
 
 If you use [an alternative authentication method](../social-authentication/) for users, you may want to disable DefectDojo API tokens because it could bypass your authentication concept. \
 Using of DefectDojo API tokens can be disabled by specifying the environment variable `DD_API_TOKENS_ENABLED` to `False`.
+Or only `api/v2/api-token-auth/` endpoint can be disabled by setting `DD_API_TOKEN_AUTH_ENDPOINT_ENABLED` to `False`.
 
 ## Sample Code
 
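The added sentence documents the narrower switch: `DD_API_TOKENS_ENABLED=False` turns off token authentication entirely, while `DD_API_TOKEN_AUTH_ENDPOINT_ENABLED=False` only removes the `api/v2/api-token-auth/` endpoint, so already-issued tokens keep working. A minimal client-side sketch of the two paths (hypothetical host and credentials; not taken from this commit):

# Sketch of the two options described above (hypothetical host and
# credentials; endpoint and header formats follow the existing docs).
import requests

BASE_URL = "https://defectdojo.example.com"  # placeholder


def get_token(username: str, password: str) -> str:
    # Works only while DD_API_TOKEN_AUTH_ENDPOINT_ENABLED is True; when the
    # endpoint is disabled this request is rejected and a token generated in
    # the UI (or previously issued) must be used instead.
    resp = requests.post(
        f"{BASE_URL}/api/v2/api-token-auth/",
        json={"username": username, "password": password},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()["token"]


def list_products(token: str) -> dict:
    # Header-based token usage keeps working as long as DD_API_TOKENS_ENABLED
    # is True, regardless of the endpoint flag.
    resp = requests.get(
        f"{BASE_URL}/api/v2/products/",
        headers={"Authorization": f"Token {token}"},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()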

dojo/context_processors.py

Lines changed: 1 addition & 0 deletions
@@ -25,6 +25,7 @@ def globalize_vars(request):
         "SAML2_LOGOUT_URL": settings.SAML2_LOGOUT_URL,
         "DOCUMENTATION_URL": settings.DOCUMENTATION_URL,
         "API_TOKENS_ENABLED": settings.API_TOKENS_ENABLED,
+        "API_TOKEN_AUTH_ENDPOINT_ENABLED": settings.API_TOKEN_AUTH_ENDPOINT_ENABLED,
     }
 
 
dojo/engagement/views.py

Lines changed: 11 additions & 1 deletion
@@ -68,6 +68,7 @@
     TypedNoteForm,
     UploadThreatForm,
 )
+from dojo.importers.base_importer import BaseImporter
 from dojo.importers.default_importer import DefaultImporter
 from dojo.models import (
     Check_List,
@@ -922,6 +923,15 @@ def create_engagement(
         # Return the engagement
         return engagement
 
+    def get_importer(
+        self,
+        context: dict,
+    ) -> BaseImporter:
+        """
+        Gets the importer to use
+        """
+        return DefaultImporter(**context)
+
     def import_findings(
         self,
         context: dict,
@@ -930,7 +940,7 @@ def import_findings(
         Attempt to import with all the supplied information
         """
         try:
-            importer_client = DefaultImporter(**context)
+            importer_client = self.get_importer(context)
             context["test"], _, finding_count, closed_finding_count, _, _, _ = importer_client.process_scan(
                 context.pop("scan", None),
             )
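The only behavioral change in import_findings is that the importer is now obtained through the new get_importer hook instead of being hard-coded to DefaultImporter, giving subclasses a single override point. A hypothetical sketch of how that could be used (the ImportScanResultsView parent name and the LoggingImporter class are assumptions for illustration, not part of this commit):

# Hypothetical subclass showing the new extension point; ImportScanResultsView
# is assumed to be the view class carrying get_importer()/import_findings(),
# and LoggingImporter exists only for this illustration.
import logging

from dojo.engagement.views import ImportScanResultsView
from dojo.importers.base_importer import BaseImporter
from dojo.importers.default_importer import DefaultImporter

logger = logging.getLogger(__name__)


class LoggingImporter(DefaultImporter):
    """Placeholder importer that logs and then delegates to the default."""

    def process_scan(self, scan, *args, **kwargs):
        logger.debug("custom importer invoked")
        return super().process_scan(scan, *args, **kwargs)


class LoggingImportView(ImportScanResultsView):
    def get_importer(self, context: dict) -> BaseImporter:
        # import_findings() now drives LoggingImporter instead of
        # DefaultImporter, with no other changes to the view.
        return LoggingImporter(**context)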

dojo/importers/default_importer.py

Lines changed: 3 additions & 2 deletions
@@ -108,7 +108,7 @@ def process_scan(
         new_findings = self.determine_process_method(self.parsed_findings, **kwargs)
         # Close any old findings in the processed list if the the user specified for that
         # to occur in the form that is then passed to the kwargs
-        closed_findings = self.close_old_findings(self.test.finding_set.values(), **kwargs)
+        closed_findings = self.close_old_findings(self.test.finding_set.all(), **kwargs)
         # Update the timestamps of the test object by looking at the findings imported
         self.update_timestamps()
         # Update the test meta
@@ -247,11 +247,12 @@ def close_old_findings(
         logger.debug("REIMPORT_SCAN: Closing findings no longer present in scan report")
         # Close old active findings that are not reported by this scan.
         # Refactoring this to only call test.finding_set.values() once.
+        findings = findings.values()
         mitigated_hash_codes = []
         new_hash_codes = []
         for finding in findings:
             new_hash_codes.append(finding["hash_code"])
-            if getattr(finding, "is_mitigated", None):
+            if finding.get("is_mitigated", None):
                 mitigated_hash_codes.append(finding["hash_code"])
         for hash_code in new_hash_codes:
             if hash_code == finding["hash_code"]:
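The two hunks belong together: process_scan now passes the Finding queryset itself, and close_old_findings converts it to dicts with .values() exactly once. That also fixes the mitigated-finding check, since getattr() on a dict produced by .values() never finds the key. A standalone illustration:

# Illustration of the bug the second hunk fixes: .values() yields dicts, and
# getattr() never sees dict keys, so the old check always evaluated to None.
row = {"hash_code": "abc123", "is_mitigated": True}  # shape of a .values() row

print(getattr(row, "is_mitigated", None))  # None - keys are not attributes
print(row.get("is_mitigated", None))       # True - correct dict lookup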

dojo/importers/default_reimporter.py

Lines changed: 8 additions & 1 deletion
@@ -147,6 +147,13 @@ def process_scan(
             test_import_history,
         )
 
+    def determine_deduplication_algorithm(self) -> str:
+        """
+        Determines what dedupe algorithm to use for the Test being processed.
+        :return: A string representing the dedupe algorithm to use.
+        """
+        return self.test.deduplication_algorithm
+
     def process_findings(
         self,
         parsed_findings: List[Finding],
@@ -160,7 +167,7 @@ def process_findings(
         at import time
         """
 
-        self.deduplication_algorithm = self.test.deduplication_algorithm
+        self.deduplication_algorithm = self.determine_deduplication_algorithm()
         self.original_items = list(self.test.finding_set.all())
         self.new_items = []
         self.reactivated_items = []
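As with get_importer in the engagement view, the reimporter now reads the dedupe algorithm through a dedicated method rather than inline, so a subclass can substitute its own choice. A hypothetical override (DefaultReImporter is assumed from the module name, and "hash_code" is used only as an example algorithm value; neither is part of this commit):

# Hypothetical subclass enabled by the new hook; not part of this commit.
from dojo.importers.default_reimporter import DefaultReImporter


class HashCodeOnlyReImporter(DefaultReImporter):
    def determine_deduplication_algorithm(self) -> str:
        # Ignore the per-test configuration and always dedupe on hash_code.
        return "hash_code"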

dojo/models.py

Lines changed: 15 additions & 8 deletions
@@ -2640,14 +2640,7 @@ def save(self, dedupe_option=True, rules_option=True, product_grading_option=Tru
         except Exception as ex:
             logger.error("Can't compute cvssv3 score for finding id %i. Invalid cvssv3 vector found: '%s'. Exception: %s", self.id, self.cvssv3, ex)
 
-        # Finding.save is called once from serializers.py with dedupe_option=False because the finding is not ready yet, for example the endpoints are not built
-        # It is then called a second time with dedupe_option defaulted to true; now we can compute the hash_code and run the deduplication
-        if dedupe_option:
-            if (self.hash_code is not None):
-                deduplicationLogger.debug("Hash_code already computed for finding")
-            else:
-                self.hash_code = self.compute_hash_code()
-                deduplicationLogger.debug("Hash_code computed for finding: %s", self.hash_code)
+        self.set_hash_code(dedupe_option)
 
         if self.pk is None:
             # We enter here during the first call from serializers.py
@@ -3346,6 +3339,20 @@ def inherit_tags(self, potentially_existing_tags):
     def violates_sla(self):
         return (self.sla_expiration_date and self.sla_expiration_date < timezone.now().date())
 
+    def set_hash_code(self, dedupe_option):
+        from dojo.utils import get_custom_method
+        if hash_method := get_custom_method("FINDING_HASH_METHOD"):
+            hash_method(self, dedupe_option)
+        else:
+            # Finding.save is called once from serializers.py with dedupe_option=False because the finding is not ready yet, for example the endpoints are not built
+            # It is then called a second time with dedupe_option defaulted to true; now we can compute the hash_code and run the deduplication
+            if dedupe_option:
+                if self.hash_code is not None:
+                    deduplicationLogger.debug("Hash_code already computed for finding")
+                else:
+                    self.hash_code = self.compute_hash_code()
+                    deduplicationLogger.debug("Hash_code computed for finding: %s", self.hash_code)
+
 
 class FindingAdmin(admin.ModelAdmin):
     # For efficiency with large databases, display many-to-many fields with raw
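Finding.save now delegates to set_hash_code, which first asks get_custom_method("FINDING_HASH_METHOD") for a pluggable hash function and only falls back to the previous inline logic when none is configured. A sketch of what such a custom method could look like, based only on the call signature visible above (how the callable is registered for get_custom_method to find is not shown in this commit):

# Hypothetical FINDING_HASH_METHOD implementation, derived only from the call
# site above: it receives the finding and the dedupe_option flag and is
# responsible for populating hash_code itself.
def finding_hash_method(finding, dedupe_option):
    # Example policy: reuse the built-in computation, but only when dedupe is
    # requested and no hash has been set yet.
    if dedupe_option and finding.hash_code is None:
        finding.hash_code = finding.compute_hash_code()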
Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-7a605674ff68576fef116e62103d11d55f25fb8dc15c87b93e850dde56604639
+38096a82c7cdeec6ca9c663c1ec3d6a5692a0e7bbfdea8fd2f05c58f753430d4

dojo/settings/settings.dist.py

Lines changed: 5 additions & 0 deletions
@@ -282,6 +282,9 @@
     # When disabled, existing user tokens will not be removed but it will not be
     # possible to create new and it will not be possible to use exising.
     DD_API_TOKENS_ENABLED=(bool, True),
+    # Enable endpoint which allow user to get API token when user+pass is provided
+    # It is useful to disable when non-local authentication (like SAML, Azure, ...) is in place
+    DD_API_TOKEN_AUTH_ENDPOINT_ENABLED=(bool, True),
     # You can set extra Jira headers by suppling a dictionary in header: value format (pass as env var like "headr_name=value,another_header=anohter_value")
     DD_ADDITIONAL_HEADERS=(dict, {}),
     # Set fields used by the hashcode generator for deduplication, via en env variable that contains a JSON string
@@ -747,6 +750,8 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param
 
 API_TOKENS_ENABLED = env("DD_API_TOKENS_ENABLED")
 
+API_TOKEN_AUTH_ENDPOINT_ENABLED = env("DD_API_TOKEN_AUTH_ENDPOINT_ENABLED")
+
 REST_FRAMEWORK = {
     "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
     "DEFAULT_AUTHENTICATION_CLASSES": (

dojo/templates/dojo/api_v2_key.html

Lines changed: 2 additions & 0 deletions
@@ -15,9 +15,11 @@ <h2> {{ name }}</h2>
     <input class="btn btn-primary" type="submit" value="{% trans "Generate New Key" %}"/>
 </form>
 <hr/>
+{% if API_TOKEN_AUTH_ENDPOINT_ENABLED %}
 <p>{% trans "Alternatively, you can use /api/v2/api-token-auth/ to get your token. Example:" %}</p>
 <pre>
 curl -X POST -H 'content-type: application/json' {% if request.is_secure %}https{% else %}http{% endif %}://{{ request.META.HTTP_HOST }}/api/v2/api-token-auth/ -d '{"username": "&lt;YOURUSERNAME&gt;", "password": "&lt;YOURPASSWORD&gt;"}'</pre>
+{% endif %}
 <p>{% trans "To use your API Key you need to specify an Authorization header. Example:" %}</p>
 <pre>
 # As a header
