From e25378ca794dfa7c2a40286807e11f586050e162 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 16:43:38 -0600 Subject: [PATCH 01/23] chore(deps): update dependency vite from 6.0.11 to v6.1.0 (docs/package.json) (#11742) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docs/package-lock.json | 183 ++++++++++++++++++++++------------------- 1 file changed, 99 insertions(+), 84 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 7feeee13d8..c0b683670b 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -2147,9 +2147,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.27.4.tgz", - "integrity": "sha512-2Y3JT6f5MrQkICUyRVCw4oa0sutfAsgaSsb0Lmmy1Wi2y7X5vT9Euqw4gOsCyy0YfKURBg35nhUKZS4mDcfULw==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.34.3.tgz", + "integrity": "sha512-8kq/NjMKkMTGKMPldWihncOl62kgnLYk7cW+/4NCUWfS70/wz4+gQ7rMxMMpZ3dIOP/xw7wKNzIuUnN/H2GfUg==", "cpu": [ "arm" ], @@ -2161,9 +2161,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.27.4.tgz", - "integrity": "sha512-wzKRQXISyi9UdCVRqEd0H4cMpzvHYt1f/C3CoIjES6cG++RHKhrBj2+29nPF0IB5kpy9MS71vs07fvrNGAl/iA==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.34.3.tgz", + "integrity": "sha512-1PqMHiuRochQ6++SDI7SaRDWJKr/NgAlezBi5nOne6Da6IWJo3hK0TdECBDwd92IUDPG4j/bZmWuwOnomNT8wA==", "cpu": [ "arm64" ], @@ -2175,9 +2175,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.27.4.tgz", - "integrity": "sha512-PlNiRQapift4LNS8DPUHuDX/IdXiLjf8mc5vdEmUR0fF/pyy2qWwzdLjB+iZquGr8LuN4LnUoSEvKRwjSVYz3Q==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.34.3.tgz", + "integrity": "sha512-fqbrykX4mGV3DlCDXhF4OaMGcchd2tmLYxVt3On5oOZWVDFfdEoYAV2alzNChl8OzNaeMAGqm1f7gk7eIw/uDg==", "cpu": [ "arm64" ], @@ -2189,9 +2189,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.27.4.tgz", - "integrity": "sha512-o9bH2dbdgBDJaXWJCDTNDYa171ACUdzpxSZt+u/AAeQ20Nk5x+IhA+zsGmrQtpkLiumRJEYef68gcpn2ooXhSQ==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.34.3.tgz", + "integrity": "sha512-8Wxrx/KRvMsTyLTbdrMXcVKfpW51cCNW8x7iQD72xSEbjvhCY3b+w83Bea3nQfysTMR7K28esc+ZFITThXm+1w==", "cpu": [ "x64" ], @@ -2203,9 +2203,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.27.4.tgz", - "integrity": "sha512-NBI2/i2hT9Q+HySSHTBh52da7isru4aAAo6qC3I7QFVsuhxi2gM8t/EI9EVcILiHLj1vfi+VGGPaLOUENn7pmw==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.34.3.tgz", + "integrity": "sha512-lpBmV2qSiELh+ATQPTjQczt5hvbTLsE0c43Rx4bGxN2VpnAZWy77we7OO62LyOSZNY7CzjMoceRPc+Lt4e9J6A==", "cpu": [ "arm64" ], @@ 
-2217,9 +2217,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.27.4.tgz", - "integrity": "sha512-wYcC5ycW2zvqtDYrE7deary2P2UFmSh85PUpAx+dwTCO9uw3sgzD6Gv9n5X4vLaQKsrfTSZZ7Z7uynQozPVvWA==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.34.3.tgz", + "integrity": "sha512-sNPvBIXpgaYcI6mAeH13GZMXFrrw5mdZVI1M9YQPRG2LpjwL8DSxSIflZoh/B5NEuOi53kxsR/S2GKozK1vDXA==", "cpu": [ "x64" ], @@ -2231,9 +2231,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.27.4.tgz", - "integrity": "sha512-9OwUnK/xKw6DyRlgx8UizeqRFOfi9mf5TYCw1uolDaJSbUmBxP85DE6T4ouCMoN6pXw8ZoTeZCSEfSaYo+/s1w==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.34.3.tgz", + "integrity": "sha512-MW6N3AoC61OfE1VgnN5O1OW0gt8VTbhx9s/ZEPLBM11wEdHjeilPzOxVmmsrx5YmejpGPvez8QwGGvMU+pGxpw==", "cpu": [ "arm" ], @@ -2245,9 +2245,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.27.4.tgz", - "integrity": "sha512-Vgdo4fpuphS9V24WOV+KwkCVJ72u7idTgQaBoLRD0UxBAWTF9GWurJO9YD9yh00BzbkhpeXtm6na+MvJU7Z73A==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.34.3.tgz", + "integrity": "sha512-2SQkhr5xvatYq0/+H6qyW0zvrQz9LM4lxGkpWURLoQX5+yP8MsERh4uWmxFohOvwCP6l/+wgiHZ1qVwLDc7Qmw==", "cpu": [ "arm" ], @@ -2259,9 +2259,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.27.4.tgz", - "integrity": "sha512-pleyNgyd1kkBkw2kOqlBx+0atfIIkkExOTiifoODo6qKDSpnc6WzUY5RhHdmTdIJXBdSnh6JknnYTtmQyobrVg==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.34.3.tgz", + "integrity": "sha512-R3JLYt8YoRwKI5shJsovLpcR6pwIMui/MGG/MmxZ1DYI3iRSKI4qcYrvYgDf4Ss2oCR3RL3F3dYK7uAGQgMIuQ==", "cpu": [ "arm64" ], @@ -2273,9 +2273,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.27.4.tgz", - "integrity": "sha512-caluiUXvUuVyCHr5DxL8ohaaFFzPGmgmMvwmqAITMpV/Q+tPoaHZ/PWa3t8B2WyoRcIIuu1hkaW5KkeTDNSnMA==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.34.3.tgz", + "integrity": "sha512-4XQhG8v/t3S7Rxs7rmFUuM6j09hVrTArzONS3fUZ6oBRSN/ps9IPQjVhp62P0W3KhqJdQADo/MRlYRMdgxr/3w==", "cpu": [ "arm64" ], @@ -2286,10 +2286,24 @@ "linux" ] }, + "node_modules/@rollup/rollup-linux-loongarch64-gnu": { + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.34.3.tgz", + "integrity": "sha512-QlW1jCUZ1LHUIYCAK2FciVw1ptHsxzApYVi05q7bz2A8oNE8QxQ85NhM4arLxkAlcnS42t4avJbSfzSQwbIaKg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.27.4", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.27.4.tgz", - "integrity": "sha512-FScrpHrO60hARyHh7s1zHE97u0KlT/RECzCKAdmI+LEoC1eDh/RDji9JgFqyO+wPDb86Oa/sXkily1+oi4FzJQ==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.34.3.tgz", + "integrity": "sha512-kMbLToizVeCcN69+nnm20Dh0hrRIAjgaaL+Wh0gWZcNt8e542d2FUGtsyuNsHVNNF3gqTJrpzUGIdwMGLEUM7g==", "cpu": [ "ppc64" ], @@ -2301,9 +2315,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.27.4.tgz", - "integrity": "sha512-qyyprhyGb7+RBfMPeww9FlHwKkCXdKHeGgSqmIXw9VSUtvyFZ6WZRtnxgbuz76FK7LyoN8t/eINRbPUcvXB5fw==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.34.3.tgz", + "integrity": "sha512-YgD0DnZ3CHtvXRH8rzjVSxwI0kMTr0RQt3o1N92RwxGdx7YejzbBO0ELlSU48DP96u1gYYVWfUhDRyaGNqJqJg==", "cpu": [ "riscv64" ], @@ -2315,9 +2329,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.27.4.tgz", - "integrity": "sha512-PFz+y2kb6tbh7m3A7nA9++eInGcDVZUACulf/KzDtovvdTizHpZaJty7Gp0lFwSQcrnebHOqxF1MaKZd7psVRg==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.34.3.tgz", + "integrity": "sha512-dIOoOz8altjp6UjAi3U9EW99s8nta4gzi52FeI45GlPyrUH4QixUoBMH9VsVjt+9A2RiZBWyjYNHlJ/HmJOBCQ==", "cpu": [ "s390x" ], @@ -2329,9 +2343,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.27.4.tgz", - "integrity": "sha512-Ni8mMtfo+o/G7DVtweXXV/Ol2TFf63KYjTtoZ5f078AUgJTmaIJnj4JFU7TK/9SVWTaSJGxPi5zMDgK4w+Ez7Q==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.34.3.tgz", + "integrity": "sha512-lOyG3aF4FTKrhpzXfMmBXgeKUUXdAWmP2zSNf8HTAXPqZay6QYT26l64hVizBjq+hJx3pl0DTEyvPi9sTA6VGA==", "cpu": [ "x64" ], @@ -2343,9 +2357,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.27.4.tgz", - "integrity": "sha512-5AeeAF1PB9TUzD+3cROzFTnAJAcVUGLuR8ng0E0WXGkYhp6RD6L+6szYVX+64Rs0r72019KHZS1ka1q+zU/wUw==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.34.3.tgz", + "integrity": "sha512-usztyYLu2i+mYzzOjqHZTaRXbUOqw3P6laNUh1zcqxbPH1P2Tz/QdJJCQSnGxCtsRQeuU2bCyraGMtMumC46rw==", "cpu": [ "x64" ], @@ -2357,9 +2371,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.27.4.tgz", - "integrity": "sha512-yOpVsA4K5qVwu2CaS3hHxluWIK5HQTjNV4tWjQXluMiiiu4pJj4BN98CvxohNCpcjMeTXk/ZMJBRbgRg8HBB6A==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.34.3.tgz", + "integrity": "sha512-ojFOKaz/ZyalIrizdBq2vyc2f0kFbJahEznfZlxdB6pF9Do6++i1zS5Gy6QLf8D7/S57MHrmBLur6AeRYeQXSA==", "cpu": [ "arm64" ], @@ -2371,9 +2385,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": 
"4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.27.4.tgz", - "integrity": "sha512-KtwEJOaHAVJlxV92rNYiG9JQwQAdhBlrjNRp7P9L8Cb4Rer3in+0A+IPhJC9y68WAi9H0sX4AiG2NTsVlmqJeQ==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.34.3.tgz", + "integrity": "sha512-K/V97GMbNa+Da9mGcZqmSl+DlJmWfHXTuI9V8oB2evGsQUtszCl67+OxWjBKpeOnYwox9Jpmt/J6VhpeRCYqow==", "cpu": [ "ia32" ], @@ -2385,9 +2399,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.27.4.tgz", - "integrity": "sha512-3j4jx1TppORdTAoBJRd+/wJRGCPC0ETWkXOecJ6PPZLj6SptXkrXcNqdj0oclbKML6FkQltdz7bBA3rUSirZug==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.34.3.tgz", + "integrity": "sha512-CUypcYP31Q8O04myV6NKGzk9GVXslO5EJNfmARNSzLF2A+5rmZUlDJ4et6eoJaZgBT9wrC2p4JZH04Vkic8HdQ==", "cpu": [ "x64" ], @@ -3850,9 +3864,9 @@ } }, "node_modules/postcss": { - "version": "8.4.49", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", - "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.1.tgz", + "integrity": "sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ==", "funding": [ { "type": "opencollective", @@ -3869,7 +3883,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.7", + "nanoid": "^3.3.8", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -4340,9 +4354,9 @@ } }, "node_modules/rollup": { - "version": "4.27.4", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.27.4.tgz", - "integrity": "sha512-RLKxqHEMjh/RGLsDxAEsaLO3mWgyoU6x9w6n1ikAzet4B3gI2/3yP6PWY2p9QzRTh6MfEIXB3MwsOY0Iv3vNrw==", + "version": "4.34.3", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.34.3.tgz", + "integrity": "sha512-ORCtU0UBJyiAIn9m0llUXJXAswG/68pZptCrqxHG7//Z2DDzAUeyyY5hqf4XrsGlUxscMr9GkQ2QI7KTLqeyPw==", "dev": true, "license": "MIT", "dependencies": { @@ -4356,24 +4370,25 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.27.4", - "@rollup/rollup-android-arm64": "4.27.4", - "@rollup/rollup-darwin-arm64": "4.27.4", - "@rollup/rollup-darwin-x64": "4.27.4", - "@rollup/rollup-freebsd-arm64": "4.27.4", - "@rollup/rollup-freebsd-x64": "4.27.4", - "@rollup/rollup-linux-arm-gnueabihf": "4.27.4", - "@rollup/rollup-linux-arm-musleabihf": "4.27.4", - "@rollup/rollup-linux-arm64-gnu": "4.27.4", - "@rollup/rollup-linux-arm64-musl": "4.27.4", - "@rollup/rollup-linux-powerpc64le-gnu": "4.27.4", - "@rollup/rollup-linux-riscv64-gnu": "4.27.4", - "@rollup/rollup-linux-s390x-gnu": "4.27.4", - "@rollup/rollup-linux-x64-gnu": "4.27.4", - "@rollup/rollup-linux-x64-musl": "4.27.4", - "@rollup/rollup-win32-arm64-msvc": "4.27.4", - "@rollup/rollup-win32-ia32-msvc": "4.27.4", - "@rollup/rollup-win32-x64-msvc": "4.27.4", + "@rollup/rollup-android-arm-eabi": "4.34.3", + "@rollup/rollup-android-arm64": "4.34.3", + "@rollup/rollup-darwin-arm64": "4.34.3", + "@rollup/rollup-darwin-x64": "4.34.3", + "@rollup/rollup-freebsd-arm64": "4.34.3", + "@rollup/rollup-freebsd-x64": "4.34.3", + "@rollup/rollup-linux-arm-gnueabihf": "4.34.3", + "@rollup/rollup-linux-arm-musleabihf": "4.34.3", + 
"@rollup/rollup-linux-arm64-gnu": "4.34.3", + "@rollup/rollup-linux-arm64-musl": "4.34.3", + "@rollup/rollup-linux-loongarch64-gnu": "4.34.3", + "@rollup/rollup-linux-powerpc64le-gnu": "4.34.3", + "@rollup/rollup-linux-riscv64-gnu": "4.34.3", + "@rollup/rollup-linux-s390x-gnu": "4.34.3", + "@rollup/rollup-linux-x64-gnu": "4.34.3", + "@rollup/rollup-linux-x64-musl": "4.34.3", + "@rollup/rollup-win32-arm64-msvc": "4.34.3", + "@rollup/rollup-win32-ia32-msvc": "4.34.3", + "@rollup/rollup-win32-x64-msvc": "4.34.3", "fsevents": "~2.3.2" } }, @@ -4717,15 +4732,15 @@ "license": "MIT" }, "node_modules/vite": { - "version": "6.0.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.11.tgz", - "integrity": "sha512-4VL9mQPKoHy4+FE0NnRE/kbY51TOfaknxAjt3fJbGJxhIpBZiqVzlZDEesWWsuREXHwNdAoOFZ9MkPEVXczHwg==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.1.0.tgz", + "integrity": "sha512-RjjMipCKVoR4hVfPY6GQTgveinjNuyLw+qruksLDvA5ktI1150VmcMBKmQaEWJhg/j6Uaf6dNCNA0AfdzUb/hQ==", "dev": true, "license": "MIT", "dependencies": { "esbuild": "^0.24.2", - "postcss": "^8.4.49", - "rollup": "^4.23.0" + "postcss": "^8.5.1", + "rollup": "^4.30.1" }, "bin": { "vite": "bin/vite.js" From 9c592bcf31af8f5dbc937f62e7023f7db2eac487 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 16:43:59 -0600 Subject: [PATCH 02/23] Bump vulners from 2.3.2 to 2.3.3 (#11737) Bumps vulners from 2.3.2 to 2.3.3. --- updated-dependencies: - dependency-name: vulners dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index bf71e35b76..2739c8d7fa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -71,6 +71,6 @@ blackduck==1.1.3 pycurl==7.45.4 # Required for Celery Broker AWS (SQS) support boto3==1.36.12 # Required for Celery Broker AWS (SQS) support netaddr==1.3.0 -vulners==2.3.2 +vulners==2.3.3 fontawesomefree==6.6.0 PyYAML==6.0.2 From 6517bbfb8f545b3edfe92c8d43495bb0e8dfaf81 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 16:44:22 -0600 Subject: [PATCH 03/23] Bump boto3 from 1.36.12 to 1.36.13 (#11736) Bumps [boto3](https://github.com/boto/boto3) from 1.36.12 to 1.36.13. - [Release notes](https://github.com/boto/boto3/releases) - [Commits](https://github.com/boto/boto3/compare/1.36.12...1.36.13) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2739c8d7fa..86407393b2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -69,7 +69,7 @@ django-ratelimit==4.1.0 argon2-cffi==23.1.0 blackduck==1.1.3 pycurl==7.45.4 # Required for Celery Broker AWS (SQS) support -boto3==1.36.12 # Required for Celery Broker AWS (SQS) support +boto3==1.36.13 # Required for Celery Broker AWS (SQS) support netaddr==1.3.0 vulners==2.3.3 fontawesomefree==6.6.0 From c34332da2bb5ed2cbe191925bbd58df6557b7613 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 16:44:56 -0600 Subject: [PATCH 04/23] chore(deps): update postgres:17.2-alpine docker digest from 17.2 to 17.2-alpine (docker-compose.yml) (#11733) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 3043ed2785..40774b43df 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -105,7 +105,7 @@ services: source: ./docker/extra_settings target: /app/docker/extra_settings postgres: - image: postgres:17.2-alpine@sha256:0bcc5bbbb2aa9c9b4c6505845918c7eb55d783cf5c1f434fac33012579fb149d + image: postgres:17.2-alpine@sha256:7e5df973a74872482e320dcbdeb055e178d6f42de0558b083892c50cda833c96 environment: POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo} POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo} From 060ff7bb5ec125d56ac5ce24329deea8d257cb37 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Wed, 5 Feb 2025 23:59:34 +0100 Subject: [PATCH 05/23] Ruff: Add and fix TRY401 (#11644) --- dojo/endpoint/views.py | 2 +- dojo/engagement/views.py | 6 +++--- dojo/finding/views.py | 4 ++-- dojo/finding_group/views.py | 4 ++-- dojo/jira_link/helper.py | 27 ++++++++++++--------------- dojo/jira_link/views.py | 18 ++++++++++-------- dojo/notifications/helper.py | 10 +++++----- dojo/product/views.py | 2 +- dojo/tasks.py | 5 ++--- dojo/test/views.py | 2 +- dojo/tool_config/views.py | 2 +- dojo/tools/api_sonarqube/importer.py | 4 ++-- ruff.toml | 2 +- 13 files changed, 43 insertions(+), 45 deletions(-) diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index 0cf5717be2..034de15dcc 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -502,7 +502,7 @@ def import_endpoint_meta(request, pid): try: endpoint_meta_import(file, product, create_endpoints, create_tags, create_dojo_meta, origin="UI", request=request) except Exception as e: - logger.exception(e) + logger.exception("An exception error occurred during the report import") add_error_message_to_response(f"An exception error occurred during the report import:{e}") return HttpResponseRedirect(reverse("endpoint") + "?product=" + pid) diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index eac899947e..0af8592052 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -928,7 +928,7 @@ def import_findings( closed_finding_count=closed_finding_count, )) except Exception as e: - logger.exception(e) + logger.exception("An exception error occurred during the report import") return f"An exception error occurred during the report import: {e}" return None @@ -1202,10 +1202,10 @@ def add_risk_acceptance(request, eid, fid=None): # we sometimes 
see a weird exception here, but are unable to reproduce. # we add some logging in case it happens risk_acceptance = form.save() - except Exception as e: + except Exception: logger.debug(vars(request.POST)) logger.error(vars(form)) - logger.exception(e) + logger.exception("Creation of Risk Acc. is not possible") raise # attach note to risk acceptance object now in database diff --git a/dojo/finding/views.py b/dojo/finding/views.py index 119cbe5203..97bb2cf9f0 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -3291,8 +3291,8 @@ def unlink_jira(request, fid): ) return JsonResponse({"result": "OK"}) - except Exception as e: - logger.exception(e) + except Exception: + logger.exception("Link to JIRA could not be deleted") messages.add_message( request, messages.ERROR, diff --git a/dojo/finding_group/views.py b/dojo/finding_group/views.py index a894766cf5..99e69b455d 100644 --- a/dojo/finding_group/views.py +++ b/dojo/finding_group/views.py @@ -153,8 +153,8 @@ def unlink_jira(request, fgid): extra_tags="alert-success") return JsonResponse({"result": "OK"}) - except Exception as e: - logger.exception(e) + except Exception: + logger.exception("Link to JIRA could not be deleted") messages.add_message( request, messages.ERROR, diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 9fe7064af5..8201a204fc 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -408,7 +408,7 @@ def get_jira_connection_raw(jira_server, jira_username, jira_password): return jira except JIRAError as e: - logger.exception(e) + logger.exception("Login to JIRA %s was unsuccessful", jira_server) error_message = e.text if hasattr(e, "text") else e.message if hasattr(e, "message") else e.args[0] @@ -421,7 +421,7 @@ def get_jira_connection_raw(jira_server, jira_username, jira_password): raise except requests.exceptions.RequestException as re: - logger.exception(re) + logger.exception("Unknown JIRA Connection Error") error_message = re.text if hasattr(re, "text") else re.message if hasattr(re, "message") else re.args[0] log_jira_generic_alert("Unknown JIRA Connection Error", re) @@ -461,7 +461,7 @@ def jira_transition(jira, issue, transition_id): return True except JIRAError as jira_error: logger.debug("error transitioning jira issue " + issue.key + " " + str(jira_error)) - logger.exception(jira_error) + logger.exception("Error transitioning Jira issue") alert_text = f"JiraError HTTP {jira_error.status_code}" if jira_error.url: alert_text += f" url: {jira_error.url}" @@ -692,8 +692,7 @@ def add_issues_to_epic(jira, obj, epic_id, issue_keys, *, ignore_epics=True): epic = jira.issue(epic_id) issue.update(parent={"key": epic.key}) except JIRAError as e: - logger.error("error adding issues %s to epic %s for %s", issue_keys, epic_id, obj.id) - logger.exception(e) + logger.exception("error adding issues %s to epic %s for %s", issue_keys, epic_id, obj.id) log_jira_alert(e.text, obj) return False @@ -1055,8 +1054,7 @@ def get_jira_issue_from_jira(find): return jira.issue(j_issue.jira_id) except JIRAError as e: - logger.exception(e) - logger.error("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe + logger.exception("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe log_jira_alert(e.text, find) return None @@ -1213,7 +1211,7 @@ def jira_attachment(finding, jira, issue, file,
jira_filename=None): jira.add_attachment(issue=issue, attachment=f) return True except JIRAError as e: - logger.exception(e) + logger.exception("Unable to add attachment") log_jira_alert("Attachment: " + e.text, finding) return False return None @@ -1267,7 +1265,7 @@ def close_epic(eng, push_to_jira, **kwargs): return False return True except JIRAError as e: - logger.exception(e) + logger.exception("Jira Engagement/Epic Close Error") log_jira_generic_alert("Jira Engagement/Epic Close Error", str(e)) return False return None @@ -1308,7 +1306,7 @@ def update_epic(engagement, **kwargs): issue.update(**jira_issue_update_kwargs) return True except JIRAError as e: - logger.exception(e) + logger.exception("Jira Engagement/Epic Update Error") log_jira_generic_alert("Jira Engagement/Epic Update Error", str(e)) return False else: @@ -1368,12 +1366,12 @@ def add_epic(engagement, **kwargs): # but it's just a non-existent project (or maybe a project for which the account has no create permission?) # # {"errorMessages":[],"errors":{"project":"project is required"}} - logger.exception(e) error = str(e) message = "" if "customfield" in error: message = "The 'Epic name id' in your DefectDojo Jira Configuration does not appear to be correct. Please visit, " + jira_instance.url + \ "/rest/api/2/field and search for Epic Name. Copy the number out of cf[number] and place in your DefectDojo settings for Jira and try again. For example, if your results are cf[100001] then copy 100001 and place it in 'Epic name id'. (Your Epic Id will be different.) \n\n" + logger.exception(message) log_jira_generic_alert("Jira Engagement/Epic Creation Error", message + error) @@ -1390,8 +1388,7 @@ def jira_get_issue(jira_project, issue_key): return jira.issue(issue_key) except JIRAError as jira_error: - logger.debug("error retrieving jira issue " + issue_key + " " + str(jira_error)) - logger.exception(jira_error) + logger.exception("error retrieving jira issue %s", issue_key) log_jira_generic_alert("error retrieving jira issue " + issue_key, str(jira_error)) return None @@ -1597,9 +1594,9 @@ def process_jira_project_form(request, instance=None, target=None, product=None, extra_tags="alert-success") error = False logger.debug("stored JIRA_Project successfully") - except Exception as e: + except Exception: error = True - logger.exception(e) + logger.exception("Unable to store Jira project") else: logger.debug(jform.errors) error = True diff --git a/dojo/jira_link/views.py b/dojo/jira_link/views.py index bb4d4ce314..0275f9976e 100644 --- a/dojo/jira_link/views.py +++ b/dojo/jira_link/views.py @@ -316,8 +316,8 @@ def post(self, request): try: jira = jira_helper.get_jira_connection_raw(jira_server, jira_username, jira_password) - except Exception as e: - logger.exception(e) # already logged in jira_helper + except Exception: + logger.exception("Unable to authenticate. Please check credentials.") # already logged in jira_helper messages.add_message( request, messages.ERROR, @@ -337,24 +337,26 @@ def post(self, request): open_key = open_key or int(node["id"]) if node["to"]["statusCategory"]["name"] == "Done": close_key = close_key or int(node["id"]) - except Exception as e: - logger.exception(e) # already logged in jira_helper + except Exception: + msg = "Unable to find Open/Close ID's (invalid issue key specified?). They will need to be found manually" + logger.exception(msg) # already logged in jira_helper messages.add_message( request, messages.ERROR, - "Unable to find Open/Close ID's (invalid issue key specified?). 
They will need to be found manually", + msg, extra_tags="alert-danger") fallback_form = self.get_fallback_form_class()(request.POST, instance=JIRA_Instance()) return render(request, self.get_fallback_template(), {"jform": fallback_form}) # Get the epic id name try: epic_name = get_custom_field(jira, "Epic Name") - except Exception as e: - logger.exception(e) # already logged in jira_helper + except Exception: + msg = "Unable to find Epic Name. It will need to be found manually" + logger.exception(msg) # already logged in jira_helper messages.add_message( request, messages.ERROR, - "Unable to find Epic Name. It will need to be found manually", + msg, extra_tags="alert-danger") fallback_form = self.get_fallback_form_class()(request.POST, instance=JIRA_Instance()) return render(request, self.get_fallback_template(), {"jform": fallback_form}) diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py index f6f21e1389..eaf1989ba6 100644 --- a/dojo/notifications/helper.py +++ b/dojo/notifications/helper.py @@ -242,7 +242,7 @@ def send_slack_notification( ) except Exception as exception: - logger.exception(exception) + logger.exception("Unable to send Slack notification") self._log_alert( exception, "Slack Notification", @@ -348,7 +348,7 @@ def send_msteams_notification( "Webhook URL for Microsoft Teams not configured: skipping system notification", ) except Exception as exception: - logger.exception(exception) + logger.exception("Unable to send Microsoft Teams Notification") self._log_alert( exception, "Microsoft Teams Notification", @@ -397,7 +397,7 @@ def send_mail_notification( email.send(fail_silently=False) except Exception as exception: - logger.exception(exception) + logger.exception("Unable to send Email Notification") self._log_alert( exception, "Email Notification", @@ -460,7 +460,7 @@ def send_webhooks_notification( except Exception as exception: error = self.ERROR_PERMANENT endpoint.note = f"Exception: {exception}"[:1000] - logger.exception(exception) + logger.exception("Unable to send Webhooks Notification") self._log_alert(exception, "Webhooks Notification") now = get_current_datetime() @@ -601,7 +601,7 @@ def send_alert_notification( alert.clean_fields(exclude=["url"]) alert.save() except Exception as exception: - logger.exception(exception) + logger.exception("Unable to create Alert Notification") self._log_alert( exception, "Alert Notification", diff --git a/dojo/product/views.py b/dojo/product/views.py index 4f0bba81d7..57ab55000f 100644 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -1757,7 +1757,7 @@ def add_api_scan_configuration(request, pid): return HttpResponseRedirect(reverse("add_api_scan_configuration", args=(pid,))) return HttpResponseRedirect(reverse("view_api_scan_configurations", args=(pid,))) except Exception as e: - logger.exception(e) + logger.exception("Unable to add API Scan Configuration") messages.add_message(request, messages.ERROR, str(e), diff --git a/dojo/tasks.py b/dojo/tasks.py index 3257115682..7416cb58f1 100644 --- a/dojo/tasks.py +++ b/dojo/tasks.py @@ -175,9 +175,8 @@ def async_sla_compute_and_notify_task(*args, **kwargs): system_settings = System_Settings.objects.get() if system_settings.enable_finding_sla: sla_compute_and_notify(*args, **kwargs) - except Exception as e: - logger.exception(e) - logger.error(f"An unexpected error was thrown calling the SLA code: {e}") + except Exception: + logger.exception("An unexpected error was thrown calling the SLA code") @app.task diff --git a/dojo/test/views.py b/dojo/test/views.py 
index 2ea9b24905..4169b66c2c 100644 --- a/dojo/test/views.py +++ b/dojo/test/views.py @@ -1002,7 +1002,7 @@ def reimport_findings( untouched_finding_count=untouched_finding_count, )) except Exception as e: - logger.exception(e) + logger.exception("An exception error occurred during the report import") return f"An exception error occurred during the report import: {e}" return None diff --git a/dojo/tool_config/views.py b/dojo/tool_config/views.py index c6c514d09d..6ffbe94067 100644 --- a/dojo/tool_config/views.py +++ b/dojo/tool_config/views.py @@ -36,7 +36,7 @@ def new_tool_config(request): extra_tags="alert-success") return HttpResponseRedirect(reverse("tool_config")) except Exception as e: - logger.exception(e) + logger.exception("Unable to connect to API") messages.add_message(request, messages.ERROR, str(e), diff --git a/dojo/tools/api_sonarqube/importer.py b/dojo/tools/api_sonarqube/importer.py index caafce26f7..a30e124045 100644 --- a/dojo/tools/api_sonarqube/importer.py +++ b/dojo/tools/api_sonarqube/importer.py @@ -203,7 +203,7 @@ def import_issues(self, test): items.append(find) except Exception as e: - logger.exception(e) + logger.exception("SonarQube API import issue") create_notification( event="sonarqube_failed", title="SonarQube API import issue", @@ -326,7 +326,7 @@ def import_hotspots(self, test): return items except Exception as e: - logger.exception(e) + logger.exception("SonarQube API import issue") create_notification( event="sonarqube_failed", title="SonarQube API import issue", diff --git a/ruff.toml b/ruff.toml index f1ed4d4cc5..8d4303652f 100644 --- a/ruff.toml +++ b/ruff.toml @@ -77,7 +77,7 @@ select = [ "PLE", "PLR01", "PLR0203", "PLR0206", "PLR0915", "PLR1716", "PLR172", "PLR1733", "PLR1736", "PLW0120", "PLW0129", "PLW013", "PLW017", "PLW02", "PLW04", "PLW07", "PLW1", "PLW2", "PLW3", - "TRY003", "TRY004", "TRY2", + "TRY003", "TRY004", "TRY2", "TRY401", "FLY", "NPY", "FAST", From e7d3d0584bdaef83de38f953289f02a2620fd255 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 00:12:51 +0100 Subject: [PATCH 06/23] feat(db-checker): Extension of "db reachable" (#11651) * fix(exit in bash): Fix exit handling everywhere * feat(db-checker): Extension of "db reachable" --- .github/workflows/k8s-tests.yml | 1 + docker/entrypoint-celery-beat.sh | 5 ++++- docker/entrypoint-celery-worker.sh | 4 +++- docker/entrypoint-initializer.sh | 6 ++++-- docker/entrypoint-integration-tests.sh | 2 ++ docker/entrypoint-nginx.sh | 2 ++ docker/entrypoint-unit-tests.sh | 3 +-- docker/entrypoint-uwsgi-dev.sh | 5 +++++ docker/entrypoint-uwsgi.sh | 8 +++++++- docker/reach_database.sh | 23 +++++++++++++++++------ docker/unit-tests.sh | 1 + 11 files changed, 47 insertions(+), 13 deletions(-) diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 9dec6f1189..df05cacf51 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -85,6 +85,7 @@ jobs: ./helm/defectdojo \ --set django.ingress.enabled=true \ --set imagePullPolicy=Never \ + --set initializer.keepSeconds="-1" \ ${{ env[matrix.databases] }} \ ${{ env[matrix.brokers] }} \ --set createSecret=true \ diff --git a/docker/entrypoint-celery-beat.sh b/docker/entrypoint-celery-beat.sh index 2404ec473f..a185493fe9 100755 --- a/docker/entrypoint-celery-beat.sh +++ b/docker/entrypoint-celery-beat.sh @@ -1,5 +1,8 @@ #!/bin/bash +set -e # needed to handle "exit" correctly + +. /secret-file-loader.sh .
/reach_database.sh umask 0002 @@ -7,7 +10,7 @@ umask 0002 id # Allow for bind-mount multiple settings.py overrides -FILES=$(ls /app/docker/extra_settings/* 2>/dev/null) +FILES=$(ls /app/docker/extra_settings/* 2>/dev/null || true) NUM_FILES=$(echo "$FILES" | wc -w) if [ "$NUM_FILES" -gt 0 ]; then COMMA_LIST=$(echo "$FILES" | tr -s '[:blank:]' ', ') diff --git a/docker/entrypoint-celery-worker.sh b/docker/entrypoint-celery-worker.sh index e02ec177f5..178cc3a887 100755 --- a/docker/entrypoint-celery-worker.sh +++ b/docker/entrypoint-celery-worker.sh @@ -3,11 +3,13 @@ umask 0002 id +set -e # needed to handle "exit" correctly + . /secret-file-loader.sh . /reach_database.sh # Allow for bind-mount multiple settings.py overrides -FILES=$(ls /app/docker/extra_settings/* 2>/dev/null) +FILES=$(ls /app/docker/extra_settings/* 2>/dev/null || true) NUM_FILES=$(echo "$FILES" | wc -w) if [ "$NUM_FILES" -gt 0 ]; then COMMA_LIST=$(echo "$FILES" | tr -s '[:blank:]' ', ') diff --git a/docker/entrypoint-initializer.sh b/docker/entrypoint-initializer.sh index 83bf97f364..c238bc9b43 100755 --- a/docker/entrypoint-initializer.sh +++ b/docker/entrypoint-initializer.sh @@ -1,5 +1,7 @@ #!/bin/bash +set -e # needed to handle "exit" correctly + . /secret-file-loader.sh . /reach_database.sh @@ -40,7 +42,7 @@ fi } # Allow for bind-mount multiple settings.py overrides -FILES=$(ls /app/docker/extra_settings/* 2>/dev/null) +FILES=$(ls /app/docker/extra_settings/* 2>/dev/null || true) NUM_FILES=$(echo "$FILES" | wc -w) if [ "$NUM_FILES" -gt 0 ]; then COMMA_LIST=$(echo "$FILES" | tr -s '[:blank:]' ', ') @@ -127,7 +129,7 @@ echo "Migrating" python3 manage.py migrate echo "Admin user: ${DD_ADMIN_USER}" -ADMIN_EXISTS=$(echo "SELECT * from auth_user;" | python manage.py dbshell | grep "${DD_ADMIN_USER}") +ADMIN_EXISTS=$(echo "SELECT * from auth_user;" | python manage.py dbshell | grep "${DD_ADMIN_USER}" || true) # Abort if the admin user already exists, instead of giving a new fake password that won't work if [ -n "$ADMIN_EXISTS" ] then diff --git a/docker/entrypoint-integration-tests.sh b/docker/entrypoint-integration-tests.sh index 0044d0b5b9..075d419d89 100755 --- a/docker/entrypoint-integration-tests.sh +++ b/docker/entrypoint-integration-tests.sh @@ -1,5 +1,7 @@ #!/bin/bash +set -e # needed to handle "exit" correctly + . /secret-file-loader.sh echo "Testing DefectDojo Service" diff --git a/docker/entrypoint-nginx.sh b/docker/entrypoint-nginx.sh index c1e542052f..375aba6c04 100755 --- a/docker/entrypoint-nginx.sh +++ b/docker/entrypoint-nginx.sh @@ -1,5 +1,7 @@ #!/bin/sh +set -e # needed to handle "exit" correctly + umask 0002 if [ "${GENERATE_TLS_CERTIFICATE}" = true ]; then openssl req \ diff --git a/docker/entrypoint-unit-tests.sh b/docker/entrypoint-unit-tests.sh index 8b6ba002ca..da2984fd85 100755 --- a/docker/entrypoint-unit-tests.sh +++ b/docker/entrypoint-unit-tests.sh @@ -3,10 +3,9 @@ # - Fail if migrations are not created # - Exit container after running tests to allow exit code to propagate as test result # set -x -# set -e +set -e # set -v - . /secret-file-loader.sh . /reach_database.sh diff --git a/docker/entrypoint-uwsgi-dev.sh b/docker/entrypoint-uwsgi-dev.sh index cb6aca58c5..ee126305ac 100755 --- a/docker/entrypoint-uwsgi-dev.sh +++ b/docker/entrypoint-uwsgi-dev.sh @@ -1,7 +1,12 @@ #!/bin/sh +set -e # needed to handle "exit" correctly + . /secret-file-loader.sh +. 
/reach_database.sh +wait_for_database_to_be_reachable +echo cd /app || exit diff --git a/docker/entrypoint-uwsgi.sh b/docker/entrypoint-uwsgi.sh index 333fe9c4f6..343aaf12fe 100755 --- a/docker/entrypoint-uwsgi.sh +++ b/docker/entrypoint-uwsgi.sh @@ -1,9 +1,12 @@ #!/bin/sh +set -e # needed to handle "exit" correctly + . /secret-file-loader.sh +. /reach_database.sh # Allow for bind-mount multiple settings.py overrides -FILES=$(ls /app/docker/extra_settings/* 2>/dev/null) +FILES=$(ls /app/docker/extra_settings/* 2>/dev/null || true) NUM_FILES=$(echo "$FILES" | wc -w) if [ "$NUM_FILES" -gt 0 ]; then COMMA_LIST=$(echo "$FILES" | tr -s '[:blank:]' ', ') @@ -15,6 +18,9 @@ if [ "$NUM_FILES" -gt 0 ]; then rm -f /app/dojo/settings/README.md fi +wait_for_database_to_be_reachable +echo + umask 0002 # do the check with Django stack diff --git a/docker/reach_database.sh b/docker/reach_database.sh index 819f69282e..536e2c9f09 100644 --- a/docker/reach_database.sh +++ b/docker/reach_database.sh @@ -6,11 +6,22 @@ wait_for_database_to_be_reachable() { DD_DATABASE_READINESS_TIMEOUT=${DD_DATABASE_READINESS_TIMEOUT:-30} until echo "select 1;" | python3 manage.py dbshell > /dev/null do - echo -n "." - failure_count=$((failure_count + 1)) - sleep 1 - if [ $DD_DATABASE_READINESS_TIMEOUT = $failure_count ]; then - exit 1 - fi + echo -n "." + failure_count=$((failure_count + 1)) + sleep 1 + if [ $DD_DATABASE_READINESS_TIMEOUT = $failure_count ]; then + exit 1 + fi done + cat < Date: Thu, 6 Feb 2025 04:03:36 +0100 Subject: [PATCH 07/23] fix(dockerfile): rename django stage in alpine (#11654) --- Dockerfile.django-alpine | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile.django-alpine b/Dockerfile.django-alpine index cfef58fa32..cb1d832998 100644 --- a/Dockerfile.django-alpine +++ b/Dockerfile.django-alpine @@ -31,7 +31,7 @@ COPY requirements.txt ./ # https://github.com/unbit/uwsgi/issues/1318#issuecomment-542238096 RUN CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements.txt -FROM base AS django-alpine +FROM base AS django WORKDIR /app ARG uid=1001 ARG gid=1337 @@ -135,5 +135,5 @@ ENV \ DD_UWSGI_NUM_OF_THREADS="2" ENTRYPOINT ["/entrypoint-uwsgi.sh"] -FROM django-alpine AS django-unittests +FROM django AS django-unittests COPY unittests/ ./unittests/ From 45d9dfe7a816e0203f725b369b6c4da6277c2032 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 04:06:31 +0100 Subject: [PATCH 08/23] feat(k8s-test): Try login and obtain API token (#11656) * feat(k8s-test): Try login and obtain API token * Make it more readable --- .github/workflows/k8s-tests.yml | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index df05cacf51..1ebe70382f 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -125,10 +125,15 @@ jobs: RETRY=0 while : do + DJANGO_IP=$(kubectl get svc defectdojo-django -o jsonpath='{.spec.clusterIP}') OUT=$(kubectl run curl --quiet=true --image=curlimages/curl:7.73.0 \ --overrides='{ "apiVersion": "v1" }' \ - --restart=Never -i --rm -- -s -m 20 -I --header "Host: $DD_HOSTNAME" http://`kubectl get service defectdojo-django -o json \ - | jq -r '.spec.clusterIP'`/login?next=/) + --restart=Never -i --rm -- \ + --silent \ + --max-time 20 \ + --head \ + --header "Host: $DD_HOSTNAME" \ + http://$DJANGO_IP/login?next=/) echo $OUT CR=`echo $OUT | egrep "^HTTP" | cut -d' ' -f2` echo $CR @@ -149,6 
+154,26 @@ jobs: break fi done + ADMIN_PASS=$(kubectl get secret/defectdojo -o jsonpath='{.data.DD_ADMIN_PASSWORD}' | base64 -d) + echo "Simple API check" + DJANGO_IP=$(kubectl get svc defectdojo-django -o jsonpath='{.spec.clusterIP}') + CR=$(kubectl run curl --quiet=true --image=curlimages/curl:7.73.0 \ + --overrides='{ "apiVersion": "v1" }' \ + --restart=Never -i --rm -- \ + --silent \ + --max-time 20 \ + --header "Host: $DD_HOSTNAME" \ + --data-raw "username=admin&password=$ADMIN_PASS" \ + --output /dev/null \ + --write-out "%{http_code}\n" \ + http://$DJANGO_IP/api/v2/api-token-auth/) + echo $CR + if [[ $CR -ne 200 ]]; then + echo "ERROR: login is not possible; got HTTP code $CR" + exit 1 + else + echo "Result received" + fi echo "Final Check of components" errors=`kubectl get pods | grep Error | awk '{print $1}'` if [[ ! -z $errors ]]; then From 2e1042bcb4f96a9c7108f2b4db92c0c2ebe37ee3 Mon Sep 17 00:00:00 2001 From: Jaja <16231488+littlesvensson@users.noreply.github.com> Date: Thu, 6 Feb 2025 04:07:35 +0100 Subject: [PATCH 09/23] Accessibility calendar switch buttons (#11661) * Aria labels for calendar navigation * Included trans func * First letter capital to follow other text examples --- dojo/locale/en/LC_MESSAGES/django.po | 20 ++++++++++++++++++++ dojo/templates/dojo/calendar.html | 11 ++++++++++- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/dojo/locale/en/LC_MESSAGES/django.po b/dojo/locale/en/LC_MESSAGES/django.po index 427ee4d505..d294e02631 100644 --- a/dojo/locale/en/LC_MESSAGES/django.po +++ b/dojo/locale/en/LC_MESSAGES/django.po @@ -4964,3 +4964,23 @@ msgstr "" #: dojo/utils.py msgid "Home" msgstr "" + +#: dojo/templates/dojo/calendar.html +msgid "Previous month" +msgstr "" + +#: dojo/templates/dojo/calendar.html +msgid "Next month" +msgstr "" + +#: dojo/templates/dojo/calendar.html +msgid "Month preview" +msgstr "" + +#: dojo/templates/dojo/calendar.html +msgid "Week preview" +msgstr "" + +#: dojo/templates/dojo/calendar.html +msgid "Day preview" +msgstr "" diff --git a/dojo/templates/dojo/calendar.html b/dojo/templates/dojo/calendar.html index d2d2ae84d1..f7d96cd058 100644 --- a/dojo/templates/dojo/calendar.html +++ b/dojo/templates/dojo/calendar.html @@ -1,5 +1,6 @@ {% extends 'base.html' %} {% load static %} +{% load i18n %} {% block content %} {{ block.super }} @@ -75,7 +76,15 @@ }, {% endfor %} {% endif %} - ] + ], + viewRender: function(view, element) { + // Update the aria-label attributes of the calendar switch buttons after the view is rendered + $('.fc-prev-button').attr('aria-label', '{% trans "Previous month" %}'); + $('.fc-next-button').attr('aria-label', '{% trans "Next month" %}'); + $('.fc-month-button').attr('aria-label', '{% trans "Month preview" %}'); + $('.fc-basicWeek-button').attr('aria-label', '{% trans "Week preview" %}'); + $('.fc-basicDay-button').attr('aria-label', '{% trans "Day preview" %}'); + } }); }); From 7e50f8fb2700a9cc833d61c11beeb30251e78799 Mon Sep 17 00:00:00 2001 From: valentijnscholten Date: Thu, 6 Feb 2025 04:09:04 +0100 Subject: [PATCH 10/23] make alpine builds work with docker compose (#11669) * make Dockerfile.django-alpine work with docker compose * Update NodeJS release keys in Dockerfile.nginx-alpine --- Dockerfile.nginx-alpine | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/Dockerfile.nginx-alpine b/Dockerfile.nginx-alpine index 96f32eaa3b..e1b839e48f 100644 --- a/Dockerfile.nginx-alpine +++ b/Dockerfile.nginx-alpine @@ -70,15 +70,14 @@ RUN addgroup -g 1000 node \ python3 \ # gpg keys
listed at https://github.com/nodejs/node#release-keys && for key in \ - 4ED778F539E3634C779C87C6D7062848A1AB005C \ - 141F07595B7B3FFE74309A937405533BE57C7D57 \ - 74F12602B6F1C4E913FAA37AD3A89613643B6201 \ - 61FC681DFB92A079F1685E77973F295594EC4689 \ - 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 \ - C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 \ - 890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4 \ - C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C \ - 108F52B48DB57BB0CC439B2997B01419BD92F80A \ + C0D6248439F1D5604AAFFB4021D900FFDB233756 \ + DD792F5973C6DE52C432CBDAC77ABFA00DDBF2B7 \ + CC68F5A3106FF448322E48ED27F5E38D5B0A215F \ + 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 \ + 890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4 \ + C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C \ + 108F52B48DB57BB0CC439B2997B01419BD92F80A \ + A363A499291CBBC940DD62E41F10027AF002F8B0 \ ; do \ gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" || \ gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \ From d5573b1aec9049a7a5e2a4b78f136a0c3b4559da Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 04:10:15 +0100 Subject: [PATCH 11/23] feat(gha): Update and pin "peter-evans/create-pull-request" (#11674) --- .github/workflows/update-sample-data.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/update-sample-data.yml b/.github/workflows/update-sample-data.yml index 6c20a90066..b17dbf89f4 100644 --- a/.github/workflows/update-sample-data.yml +++ b/.github/workflows/update-sample-data.yml @@ -43,7 +43,7 @@ jobs: git push --set-upstream origin $(git rev-parse --abbrev-ref HEAD) - name: Create Pull Request - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@67ccf781d68cd99b580ae25a5c18a1cc84ffff1f # v7.0.6 with: token: ${{ secrets.GITHUB_TOKEN }} commit-message: "Update sample data" From 818df37ad44784c43d0efc2d80dfb3cf82dfae88 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 04:13:30 +0100 Subject: [PATCH 12/23] feat(gha): Pin and update actions/checkout (#11675) --- .github/workflows/update-sample-data.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/update-sample-data.yml b/.github/workflows/update-sample-data.yml index b17dbf89f4..1bf7b86cba 100644 --- a/.github/workflows/update-sample-data.yml +++ b/.github/workflows/update-sample-data.yml @@ -16,7 +16,7 @@ jobs: steps: # Checkout the repository - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.ref_name || 'dev'}} From f13769f13854860ddd38a44fb9fd088ffa9dd4d6 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 04:14:42 +0100 Subject: [PATCH 13/23] feat(db-migration-checker): Add successful message (#11685) --- helm/defectdojo/templates/_helpers.tpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helm/defectdojo/templates/_helpers.tpl b/helm/defectdojo/templates/_helpers.tpl index c3e0026c2e..848f961407 100644 --- a/helm/defectdojo/templates/_helpers.tpl +++ b/helm/defectdojo/templates/_helpers.tpl @@ -140,7 +140,7 @@ Create chart name and version as used by the chart label. command: - sh - -c - - while ! /app/manage.py migrate --check; do echo "Database is not migrated to the latest state yet"; sleep 5; done; + - while ! 
/app/manage.py migrate --check; do echo "Database is not migrated to the latest state yet"; sleep 5; done; echo "Database is migrated to the latest state"; image: '{{ template "django.uwsgi.repository" . }}:{{ .Values.tag }}' imagePullPolicy: {{ .Values.imagePullPolicy }} {{- if .Values.securityContext.enabled }} From 4306a58fa922fbc2310604886add13d78fe78ba7 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:12:48 +0100 Subject: [PATCH 14/23] Ruff: Add and fix PLW0127 (#11642) --- dojo/tools/dsop/parser.py | 1 - dojo/tools/govulncheck/parser.py | 2 +- dojo/tools/mend/parser.py | 2 -- dojo/tools/npm_audit_7_plus/parser.py | 1 - ruff.toml | 2 +- 5 files changed, 2 insertions(+), 6 deletions(-) diff --git a/dojo/tools/dsop/parser.py b/dojo/tools/dsop/parser.py index bf729cb006..020c598f5e 100644 --- a/dojo/tools/dsop/parser.py +++ b/dojo/tools/dsop/parser.py @@ -224,7 +224,6 @@ def __parse_anchore_compliance(self, test, items, sheet): severity = "Critical" else: severity = "Info" - severity = severity mitigation = "To be investigated" description = "Gate: {} (Trigger: {}): {}".format( row[headers["gate"]], diff --git a/dojo/tools/govulncheck/parser.py b/dojo/tools/govulncheck/parser.py index f8764cca31..0a3c2b870b 100644 --- a/dojo/tools/govulncheck/parser.py +++ b/dojo/tools/govulncheck/parser.py @@ -50,7 +50,7 @@ def get_finding_trace_info(self, data, osv_id): version = trace.get("version", "Unknown version") package = trace.get("module", "Unknown package") function = trace.get("function", "Unknown function") - filename = filename = trace.get("position", {}).get("filename", "Unknown filename") + filename = trace.get("position", {}).get("filename", "Unknown filename") line = trace.get("position", {}).get("line", "Unknown line") trace_info_str = f"\tModule: {module}, Version: {version}, Package: {package}, Function: {function}, File: {filename}, Line: {line}" trace_info_strs.append(trace_info_str) diff --git a/dojo/tools/mend/parser.py b/dojo/tools/mend/parser.py index cc18bf3175..76dba35887 100644 --- a/dojo/tools/mend/parser.py +++ b/dojo/tools/mend/parser.py @@ -183,8 +183,6 @@ def _build_common_output(node, lib_name=None): locations = truncated_locations locations.append("...") # Add the ellipsis to the end of the locations list - filepaths = filepaths - new_finding = Finding( title=title, test=test, diff --git a/dojo/tools/npm_audit_7_plus/parser.py b/dojo/tools/npm_audit_7_plus/parser.py index 479e3eb217..f62ccfcd12 100644 --- a/dojo/tools/npm_audit_7_plus/parser.py +++ b/dojo/tools/npm_audit_7_plus/parser.py @@ -95,7 +95,6 @@ def get_item(item_node, tree, test): """Return the individual Findigns from items found in report.""" references = [] mitigation = "" - test = test static_finding = True title = "" unique_id_from_tool = "" diff --git a/ruff.toml b/ruff.toml index 8d4303652f..6333d345f2 100644 --- a/ruff.toml +++ b/ruff.toml @@ -76,7 +76,7 @@ select = [ "PLC01", "PLC0205", "PLC0208", "PLC0414", "PLC24", "PLC3", "PLE", "PLR01", "PLR0203", "PLR0206", "PLR0915", "PLR1716", "PLR172", "PLR1733", "PLR1736", - "PLW0120", "PLW0129", "PLW013", "PLW017", "PLW02", "PLW04", "PLW07", "PLW1", "PLW2", "PLW3", + "PLW0120", "PLW0127", "PLW0129", "PLW013", "PLW017", "PLW02", "PLW04", "PLW07", "PLW1", "PLW2", "PLW3", "TRY003", "TRY004", "TRY2", "TRY401", "FLY", "NPY", From e16f937f11d6b607dafd283296b1f40a4b497510 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:14:06 +0100 Subject: [PATCH 
15/23] Ruff: Add and fix A001 (#11645) --- dojo/api_v2/mixins.py | 4 +- dojo/api_v2/permissions.py | 36 ++++++------- dojo/decorators.py | 3 +- dojo/filters.py | 4 +- dojo/jira_link/helper.py | 8 +-- dojo/models.py | 4 +- dojo/notes/views.py | 54 +++++++++---------- dojo/object/views.py | 22 ++++---- dojo/survey/views.py | 16 +++--- dojo/tools/api_bugcrowd/api_client.py | 12 ++--- dojo/tools/api_vulners/parser.py | 12 ++--- dojo/tools/blackduck/importer.py | 8 +-- .../blackduck_component_risk/importer.py | 10 ++-- dojo/tools/blackduck_component_risk/parser.py | 12 ++--- dojo/tools/checkmarx_one/parser.py | 8 +-- dojo/tools/codechecker/parser.py | 4 +- dojo/tools/dependency_check/parser.py | 8 +-- dojo/tools/detect_secrets/parser.py | 8 +-- dojo/tools/govulncheck/parser.py | 4 +- dojo/tools/harbor_vulnerability/parser.py | 6 +-- dojo/tools/hcl_asoc_sast/parser.py | 4 +- dojo/tools/ort/parser.py | 9 ++-- dojo/tools/rapplex/parser.py | 4 +- dojo/tools/trivy/parser.py | 22 ++++---- dojo/tools/veracode/json_parser.py | 6 +-- dojo/tools/yarn_audit/parser.py | 4 +- dojo/utils.py | 4 +- dojo/widgets.py | 2 +- ruff.toml | 2 +- 29 files changed, 148 insertions(+), 152 deletions(-) diff --git a/dojo/api_v2/mixins.py b/dojo/api_v2/mixins.py index 6c6b479275..c8f3dca316 100644 --- a/dojo/api_v2/mixins.py +++ b/dojo/api_v2/mixins.py @@ -20,10 +20,10 @@ class DeletePreviewModelMixin: ) @action(detail=True, methods=["get"], filter_backends=[], suffix="List") def delete_preview(self, request, pk=None): - object = self.get_object() + obj = self.get_object() collector = NestedObjects(using=DEFAULT_DB_ALIAS) - collector.collect([object]) + collector.collect([obj]) rels = collector.nested() def flatten(elem): diff --git a/dojo/api_v2/permissions.py b/dojo/api_v2/permissions.py index fe508c92b1..ddaf67e2a4 100644 --- a/dojo/api_v2/permissions.py +++ b/dojo/api_v2/permissions.py @@ -33,8 +33,8 @@ def check_post_permission(request, post_model, post_pk, post_permission): if request.data.get(post_pk) is None: msg = f"Unable to check for permissions: Attribute '{post_pk}' is required" raise ParseError(msg) - object = get_object_or_404(post_model, pk=request.data.get(post_pk)) - return user_has_permission(request.user, object, post_permission) + obj = get_object_or_404(post_model, pk=request.data.get(post_pk)) + return user_has_permission(request.user, obj, post_permission) return True @@ -158,29 +158,29 @@ def has_permission(self, request, view): has_permission_result = True product_id = request.data.get("product", None) if product_id: - object = get_object_or_404(Product, pk=product_id) + obj = get_object_or_404(Product, pk=product_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Product_Edit, + request.user, obj, Permissions.Product_Edit, ) ) finding_id = request.data.get("finding", None) if finding_id: - object = get_object_or_404(Finding, pk=finding_id) + obj = get_object_or_404(Finding, pk=finding_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Finding_Edit, + request.user, obj, Permissions.Finding_Edit, ) ) endpoint_id = request.data.get("endpoint", None) if endpoint_id: - object = get_object_or_404(Endpoint, pk=endpoint_id) + obj = get_object_or_404(Endpoint, pk=endpoint_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Endpoint_Edit, + request.user, obj, Permissions.Endpoint_Edit, ) ) return has_permission_result @@ 
-744,20 +744,20 @@ def has_permission(self, request, view): has_permission_result = True engagement_id = request.data.get("engagement", None) if engagement_id: - object = get_object_or_404(Engagement, pk=engagement_id) + obj = get_object_or_404(Engagement, pk=engagement_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Engagement_Edit, + request.user, obj, Permissions.Engagement_Edit, ) ) product_id = request.data.get("product", None) if product_id: - object = get_object_or_404(Product, pk=product_id) + obj = get_object_or_404(Product, pk=product_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Product_Edit, + request.user, obj, Permissions.Product_Edit, ) ) return has_permission_result @@ -798,29 +798,29 @@ def has_permission(self, request, view): has_permission_result = True engagement_id = request.data.get("engagement", None) if engagement_id: - object = get_object_or_404(Engagement, pk=engagement_id) + obj = get_object_or_404(Engagement, pk=engagement_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Engagement_Edit, + request.user, obj, Permissions.Engagement_Edit, ) ) finding_id = request.data.get("finding", None) if finding_id: - object = get_object_or_404(Finding, pk=finding_id) + obj = get_object_or_404(Finding, pk=finding_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Finding_Edit, + request.user, obj, Permissions.Finding_Edit, ) ) finding_group_id = request.data.get("finding_group", None) if finding_group_id: - object = get_object_or_404(Finding_Group, pk=finding_group_id) + obj = get_object_or_404(Finding_Group, pk=finding_group_id) has_permission_result = ( has_permission_result and user_has_permission( - request.user, object, Permissions.Finding_Group_Edit, + request.user, obj, Permissions.Finding_Group_Edit, ) ) return has_permission_result diff --git a/dojo/decorators.py b/dojo/decorators.py index b97d4ec795..2b1c08e57c 100644 --- a/dojo/decorators.py +++ b/dojo/decorators.py @@ -66,9 +66,8 @@ def __wrapper__(*args, **kwargs): if model_or_id: if isinstance(model_or_id, models.Model) and we_want_async(*args, func=func, **kwargs): logger.debug("converting model_or_id to id: %s", model_or_id) - id = model_or_id.id args = list(args) - args[parameter] = id + args[parameter] = model_or_id.id return func(*args, **kwargs) diff --git a/dojo/filters.py b/dojo/filters.py index f024e60f5a..844d975c4e 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -103,8 +103,8 @@ def custom_filter(queryset, name, value): values = value.split(",") - filter = (f"{name}__in") - return queryset.filter(Q(**{filter: values})) + cust_filter = (f"{name}__in") + return queryset.filter(Q(**{cust_filter: values})) def custom_vulnerability_id_filter(queryset, name, value): diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 8201a204fc..0de8c620af 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -572,12 +572,12 @@ def get_labels(obj): if system_settings.add_vulnerability_id_to_jira_label or (jira_project and jira_project.add_vulnerability_id_to_jira_label): if isinstance(obj, Finding) and obj.vulnerability_ids: - for id in obj.vulnerability_ids: - labels.append(id) + for vul_id in obj.vulnerability_ids: + labels.append(vul_id) elif isinstance(obj, Finding_Group): for finding in obj.findings.all(): - for id in 
finding.vulnerability_ids: - labels.append(id) + for vul_id in finding.vulnerability_ids: + labels.append(vul_id) return labels diff --git a/dojo/models.py b/dojo/models.py index 7a711292b1..4901df7fde 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -941,8 +941,8 @@ def clean(self): self.finding_id] ids_count = 0 - for id in ids: - if id is not None: + for obj_id in ids: + if obj_id is not None: ids_count += 1 if ids_count == 0: diff --git a/dojo/notes/views.py b/dojo/notes/views.py index 6dfca7895d..c255414ddc 100644 --- a/dojo/notes/views.py +++ b/dojo/notes/views.py @@ -26,20 +26,20 @@ def delete_note(request, id, page, objid): object_id = None if page == "engagement": - object = get_object_or_404(Engagement, id=objid) - object_id = object.id + obj = get_object_or_404(Engagement, id=objid) + object_id = obj.id reverse_url = "view_engagement" elif page == "test": - object = get_object_or_404(Test, id=objid) - object_id = object.id + obj = get_object_or_404(Test, id=objid) + object_id = obj.id reverse_url = "view_test" elif page == "finding": - object = get_object_or_404(Finding, id=objid) - object_id = object.id + obj = get_object_or_404(Finding, id=objid) + object_id = obj.id reverse_url = "view_finding" elif page == "cred": - object = get_object_or_404(Cred_User, id=objid) - object_id = object.id + obj = get_object_or_404(Cred_User, id=objid) + object_id = obj.id reverse_url = "view_cred_details" form = DeleteNoteForm(request.POST, instance=note) @@ -47,7 +47,7 @@ def delete_note(request, id, page, objid): if page is None: raise PermissionDenied if str(request.user) != note.author.username: - user_has_permission_or_403(request.user, object, Permissions.Note_Delete) + user_has_permission_or_403(request.user, obj, Permissions.Note_Delete) if form.is_valid(): note.delete() @@ -73,24 +73,24 @@ def edit_note(request, id, page, objid): raise PermissionDenied if page == "engagement": - object = get_object_or_404(Engagement, id=objid) - object_id = object.id + obj = get_object_or_404(Engagement, id=objid) + object_id = obj.id reverse_url = "view_engagement" elif page == "test": - object = get_object_or_404(Test, id=objid) - object_id = object.id + obj = get_object_or_404(Test, id=objid) + object_id = obj.id reverse_url = "view_test" elif page == "finding": - object = get_object_or_404(Finding, id=objid) - object_id = object.id + obj = get_object_or_404(Finding, id=objid) + object_id = obj.id reverse_url = "view_finding" if str(request.user) != note.author.username: - user_has_permission_or_403(request.user, object, Permissions.Note_Edit) + user_has_permission_or_403(request.user, obj, Permissions.Note_Edit) note_type_activation = Note_Type.objects.filter(is_active=True).count() if note_type_activation: - available_note_types = find_available_notetypes(object, note) + available_note_types = find_available_notetypes(obj, note) if request.method == "POST": if note_type_activation: @@ -114,9 +114,9 @@ def edit_note(request, id, page, objid): history.save() note.history.add(history) note.save() - object.last_reviewed = note.date - object.last_reviewed_by = request.user - object.save() + obj.last_reviewed = note.date + obj.last_reviewed_by = request.user + obj.save() form = NoteForm() messages.add_message(request, messages.SUCCESS, @@ -148,22 +148,22 @@ def note_history(request, id, page, objid): object_id = None if page == "engagement": - object = get_object_or_404(Engagement, id=objid) - object_id = object.id + obj = get_object_or_404(Engagement, id=objid) + object_id = obj.id reverse_url = 
"view_engagement" elif page == "test": - object = get_object_or_404(Test, id=objid) - object_id = object.id + obj = get_object_or_404(Test, id=objid) + object_id = obj.id reverse_url = "view_test" elif page == "finding": - object = get_object_or_404(Finding, id=objid) - object_id = object.id + obj = get_object_or_404(Finding, id=objid) + object_id = obj.id reverse_url = "view_finding" if page is None: raise PermissionDenied if str(request.user) != note.author.username: - user_has_permission_or_403(request.user, object, Permissions.Note_View_History) + user_has_permission_or_403(request.user, obj, Permissions.Note_View_History) history = note.history.all() diff --git a/dojo/object/views.py b/dojo/object/views.py index 0cca584b0b..ad649885bf 100644 --- a/dojo/object/views.py +++ b/dojo/object/views.py @@ -57,14 +57,14 @@ def view_objects(request, pid): @user_is_authorized(Product, Permissions.Product_Tracking_Files_Edit, "pid") def edit_object(request, pid, ttid): - object = Objects_Product.objects.get(pk=ttid) + object_prod = Objects_Product.objects.get(pk=ttid) product = get_object_or_404(Product, id=pid) - if object.product != product: - msg = f"Product {pid} does not fit to product of Object {object.product.id}" + if object_prod.product != product: + msg = f"Product {pid} does not fit to product of Object {object_prod.product.id}" raise BadRequest(msg) if request.method == "POST": - tform = ObjectSettingsForm(request.POST, instance=object) + tform = ObjectSettingsForm(request.POST, instance=object_prod) if tform.is_valid(): tform.save() @@ -74,7 +74,7 @@ def edit_object(request, pid, ttid): extra_tags="alert-success") return HttpResponseRedirect(reverse("view_objects", args=(pid,))) else: - tform = ObjectSettingsForm(instance=object) + tform = ObjectSettingsForm(instance=object_prod) product_tab = Product_Tab(product, title="Edit Tracked Files", tab="settings") return render(request, @@ -87,21 +87,21 @@ def edit_object(request, pid, ttid): @user_is_authorized(Product, Permissions.Product_Tracking_Files_Delete, "pid") def delete_object(request, pid, ttid): - object = Objects_Product.objects.get(pk=ttid) + object_prod = Objects_Product.objects.get(pk=ttid) product = get_object_or_404(Product, id=pid) - if object.product != product: - msg = f"Product {pid} does not fit to product of Object {object.product.id}" + if object_prod.product != product: + msg = f"Product {pid} does not fit to product of Object {object_prod.product.id}" raise BadRequest(msg) if request.method == "POST": - tform = ObjectSettingsForm(request.POST, instance=object) - object.delete() + tform = ObjectSettingsForm(request.POST, instance=object_prod) + object_prod.delete() messages.add_message(request, messages.SUCCESS, "Tracked Product Files Deleted.", extra_tags="alert-success") return HttpResponseRedirect(reverse("view_objects", args=(pid,))) - tform = DeleteObjectsSettingsForm(instance=object) + tform = DeleteObjectsSettingsForm(instance=object_prod) product_tab = Product_Tab(product, title="Delete Product Tool Configuration", tab="settings") return render(request, diff --git a/dojo/survey/views.py b/dojo/survey/views.py index 824bbf52d4..544d90fddc 100644 --- a/dojo/survey/views.py +++ b/dojo/survey/views.py @@ -468,8 +468,8 @@ def create_question(request): choiceQuestionFrom = CreateChoiceQuestionForm(request.POST) if form.is_valid(): - type = form.cleaned_data["type"] - if type == "text": + question_type = form.cleaned_data["type"] + if question_type == "text": if textQuestionForm.is_valid(): created_question = 
TextQuestion.objects.create( optional=form.cleaned_data["optional"], @@ -483,7 +483,7 @@ def create_question(request): return HttpResponseRedirect(reverse("questions")) error = True - elif type == "choice": + elif question_type == "choice": if choiceQuestionFrom.is_valid(): created_question = ChoiceQuestion.objects.create( optional=form.cleaned_data["optional"], @@ -537,19 +537,19 @@ def edit_question(request, qid): "This question is part of an already answered survey. If you change it, the responses " "may no longer be valid.", extra_tags="alert-info") - type = str(ContentType.objects.get_for_model(question)) + content_type = str(ContentType.objects.get_for_model(question)) - if type == "dojo | text question": + if content_type == "dojo | text question": form = EditTextQuestionForm(instance=question) - elif type == "dojo | choice question": + elif content_type == "dojo | choice question": form = EditChoiceQuestionForm(instance=question) else: raise Http404 if request.method == "POST": - if type == "dojo | text question": + if content_type == "dojo | text question": form = EditTextQuestionForm(request.POST, instance=question) - elif type == "dojo | choice question": + elif content_type == "dojo | choice question": form = EditChoiceQuestionForm(request.POST, instance=question) else: raise Http404 diff --git a/dojo/tools/api_bugcrowd/api_client.py b/dojo/tools/api_bugcrowd/api_client.py index 7e9ac2b91c..c1672ddfff 100644 --- a/dojo/tools/api_bugcrowd/api_client.py +++ b/dojo/tools/api_bugcrowd/api_client.py @@ -51,10 +51,10 @@ def get_findings(self, program, target): else: params_encoded = urlencode(params_default) - next = f"{self.bugcrowd_api_url}/submissions?{params_encoded}" - while next != "": + next_page = f"{self.bugcrowd_api_url}/submissions?{params_encoded}" + while next_page != "": response = self.session.get( - url=next, + url=next_page, timeout=settings.REQUESTS_TIMEOUT, ) response.raise_for_status() @@ -65,15 +65,15 @@ def get_findings(self, program, target): # When we hit the end of the submissions, break out if len(data["data"]) == 0: - next = "" + next_page = "" break # Otherwise, keep updating next link - next = "{}{}".format( + next_page = "{}{}".format( self.bugcrowd_api_url, data["links"]["next"], ) else: - next = "over" + next_page = "over" def test_connection(self): # Request programs diff --git a/dojo/tools/api_vulners/parser.py b/dojo/tools/api_vulners/parser.py index fe9ec24219..2414f3a836 100644 --- a/dojo/tools/api_vulners/parser.py +++ b/dojo/tools/api_vulners/parser.py @@ -55,9 +55,9 @@ def get_findings(self, file, test): # for each issue found for component in report: - id = component.get("vulnID") - vuln = vulns.get(id, {}) - title = component.get("title", id) + vuln_id = component.get("vulnID") + vuln = vulns.get(vuln_id, {}) + title = component.get("title", vuln_id) family = component.get("family") agentip = component.get("agentip") agentfqdn = component.get("agentfqdn") @@ -71,7 +71,7 @@ def get_findings(self, file, test): mitigation=component.get("cumulativeFix"), static_finding=False, # by definition dynamic_finding=True, # by definition - vuln_id_from_tool="VNS/" + id, + vuln_id_from_tool="VNS/" + vuln_id, component_name=agentfqdn if agentfqdn != "unknown" else agentip, @@ -79,7 +79,7 @@ def get_findings(self, file, test): endpoint = Endpoint(host=agentip) finding.unsaved_endpoints = [endpoint] - finding.unsaved_vulnerability_ids = ["VNS/" + id] + finding.unsaved_vulnerability_ids = ["VNS/" + vuln_id] # CVE List cve_ids = vuln.get("cvelist", []) @@ -97,7 
+97,7 @@ def get_findings(self, file, test): # References references = ( - f"**Vulners ID** \nhttps://vulners.com/{family}/{id} \n" + f"**Vulners ID** \nhttps://vulners.com/{family}/{vuln_id} \n" ) if len(cve_ids): references += "**Related CVE** \n" diff --git a/dojo/tools/blackduck/importer.py b/dojo/tools/blackduck/importer.py index 9d50dc7332..83be2fd541 100644 --- a/dojo/tools/blackduck/importer.py +++ b/dojo/tools/blackduck/importer.py @@ -48,16 +48,16 @@ def _process_zipfile(self, report): files = {} security_issues = {} - with zipfile.ZipFile(str(report)) as zip: - for full_file_name in zip.namelist(): + with zipfile.ZipFile(str(report)) as zipf: + for full_file_name in zipf.namelist(): file_name = full_file_name.split("/")[-1] # Backwards compatibility, newer versions of Blackduck have a source file rather # than a "files" file. if "source" in file_name or "files" in file_name: - with io.TextIOWrapper(zip.open(full_file_name), encoding="utf-8") as f: + with io.TextIOWrapper(zipf.open(full_file_name), encoding="utf-8") as f: files = self.__partition_by_key(f) elif "security" in file_name: - with io.TextIOWrapper(zip.open(full_file_name), encoding="utf-8") as f: + with io.TextIOWrapper(zipf.open(full_file_name), encoding="utf-8") as f: security_issues = self.__partition_by_key(f) project_ids = set(files.keys()) & set(security_issues.keys()) diff --git a/dojo/tools/blackduck_component_risk/importer.py b/dojo/tools/blackduck_component_risk/importer.py index fb35be926d..5c0f7a95f3 100644 --- a/dojo/tools/blackduck_component_risk/importer.py +++ b/dojo/tools/blackduck_component_risk/importer.py @@ -43,24 +43,24 @@ def _process_zipfile(self, report: Path) -> (dict, dict, dict): components = {} source = {} try: - with zipfile.ZipFile(str(report)) as zip: + with zipfile.ZipFile(str(report)) as zipf: c_file = False s_file = False - for full_file_name in zip.namelist(): + for full_file_name in zipf.namelist(): # Just in case the word component or security is in the name of # zip file, best to ignore it. file_name = full_file_name.split("/")[-1] # Look for the component and security CSVs. if "component" in file_name: - with io.TextIOWrapper(zip.open(full_file_name), encoding="utf-8") as f: + with io.TextIOWrapper(zipf.open(full_file_name), encoding="utf-8") as f: components = self.__get_components(f) c_file = True elif "security" in file_name: - with io.TextIOWrapper(zip.open(full_file_name), encoding="utf-8") as f: + with io.TextIOWrapper(zipf.open(full_file_name), encoding="utf-8") as f: security_issues = self.__get_security_risks(f) s_file = True elif "source" in file_name: - with io.TextIOWrapper(zip.open(full_file_name), encoding="utf-8") as f: + with io.TextIOWrapper(zipf.open(full_file_name), encoding="utf-8") as f: source = self.__get_source(f) # Raise exception to error-out if the zip is missing either of # these files. 
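All of the renames in this patch target Ruff's A001 rule (builtin-variable-shadowing), which flags any assignment or loop target that reuses the name of a Python builtin such as object, id, type, zip, filter, or license. A minimal sketch of the pattern being fixed, using hypothetical names rather than code from this repository:

    # Illustrative only. Before: the loop target shadows builtins.id
    # (Ruff A001), so the id() builtin is unreachable inside the loop.
    def collect_labels(vulnerability_ids):
        labels = []
        for id in vulnerability_ids:  # noqa: A001 -- shown to trigger the rule
            labels.append(id)
        return labels

    # After: a descriptive name keeps the builtin available.
    def collect_labels_fixed(vulnerability_ids):
        labels = []
        for vuln_id in vulnerability_ids:
            labels.append(vuln_id)
        return labels

    print(collect_labels_fixed(["CVE-2015-2992", "CVE-2021-3807"]))

The same strategy appears throughout the diff: object becomes obj, zip becomes zipf, type becomes question_type or item_type, and license becomes lic, so no builtin is masked.
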
diff --git a/dojo/tools/blackduck_component_risk/parser.py b/dojo/tools/blackduck_component_risk/parser.py index 579d21f628..abca1aa199 100644 --- a/dojo/tools/blackduck_component_risk/parser.py +++ b/dojo/tools/blackduck_component_risk/parser.py @@ -58,8 +58,8 @@ def ingest_findings(self, components, securities, sources, test): for component_id, component in components.items(): source = {} # Find the sources.csv data for this component - for id, src in sources.items(): - if id in component_id: + for source_id, src in sources.items(): + if source_id in component_id: source = src if component.get("Component policy status") == "In Violation": # We have us a license risk: @@ -243,7 +243,7 @@ def license_severity(self, component): :param vulns: Dictionary {component_version_identifier: [vulns]} :return: """ - map = { + severity_map = { "HIGH": "High", "MEDIUM": "Medium", "LOW": "Low", @@ -253,7 +253,7 @@ def license_severity(self, component): } sev = "None" try: - sev = map[component.get("License Risk")] + sev = severity_map[component.get("License Risk")] except KeyError: sev = "None" return sev @@ -266,7 +266,7 @@ def security_severity(self, vulns): :param vulns: Dictionary {component_version_identifier: [vulns]} :return: """ - map = { + severity_map = { "HIGH": "High", "MEDIUM": "Medium", "LOW": "Low", @@ -278,7 +278,7 @@ def security_severity(self, vulns): for vuln in vulns: if float(vuln["Base score"]) > max_severity: max_severity = float(vuln["Base score"]) - sev = map[vuln["Security Risk"]] + sev = severity_map[vuln["Security Risk"]] return sev def security_mitigation(self, vulns): diff --git a/dojo/tools/checkmarx_one/parser.py b/dojo/tools/checkmarx_one/parser.py index f1a673c52d..9d2fada818 100644 --- a/dojo/tools/checkmarx_one/parser.py +++ b/dojo/tools/checkmarx_one/parser.py @@ -199,7 +199,7 @@ def parse_vulnerabilities( ) -> list[Finding]: findings = [] for result in results: - id = result.get("identifiers")[0].get("value") + result_id = result.get("identifiers")[0].get("value") cwe = None if "vulnerabilityDetails" in result: cwe = result.get("vulnerabilites").get("cweId") @@ -208,14 +208,14 @@ def parse_vulnerabilities( locations_startLine = result.get("location").get("start_line") locations_endLine = result.get("location").get("end_line") finding = Finding( - unique_id_from_tool=id, + unique_id_from_tool=result_id, file_path=locations_uri, line=locations_startLine, - title=id + "_" + locations_uri, + title=result_id + "_" + locations_uri, test=test, cwe=cwe, severity=severity, - description="**id**: " + str(id) + "\n" + description="**id**: " + str(result_id) + "\n" + "**uri**: " + locations_uri + "\n" + "**startLine**: " + str(locations_startLine) + "\n" + "**endLine**: " + str(locations_endLine) + "\n", diff --git a/dojo/tools/codechecker/parser.py b/dojo/tools/codechecker/parser.py index 7bdccf5b36..4e821b8b03 100644 --- a/dojo/tools/codechecker/parser.py +++ b/dojo/tools/codechecker/parser.py @@ -82,15 +82,13 @@ def get_item(vuln): ] # this finding is false positive active = not false_positive and not risk_accepted - hash = hashlib.sha256() unique_id = ( vuln["report_hash"] + "." 
+ vuln["analyzer_result_file_path"] + description ) - hash.update(unique_id.encode()) - unique_id_from_tool = hash.hexdigest() + unique_id_from_tool = hashlib.sha256(unique_id.encode()).hexdigest() title = "" if "checker_name" in vuln: diff --git a/dojo/tools/dependency_check/parser.py b/dojo/tools/dependency_check/parser.py index 8f87042b63..984f436d00 100644 --- a/dojo/tools/dependency_check/parser.py +++ b/dojo/tools/dependency_check/parser.py @@ -60,8 +60,8 @@ def get_component_name_and_version_from_dependency( # analyzing identifier from the more generic to package_node = identifiers_node.find(".//" + namespace + "package") if package_node: - id = package_node.findtext(f"{namespace}id") - purl = PackageURL.from_string(id) + pck_id = package_node.findtext(f"{namespace}id") + purl = PackageURL.from_string(pck_id) purl_parts = purl.to_dict() component_name = ( purl_parts["namespace"] + ":" @@ -96,8 +96,8 @@ def get_component_name_and_version_from_dependency( ".//" + namespace + 'identifier[@type="cpe"]', ) if cpe_node: - id = cpe_node.findtext(f"{namespace}name") - cpe = CPE(id) + cpe_id = cpe_node.findtext(f"{namespace}name") + cpe = CPE(cpe_id) component_name = ( cpe.get_vendor()[0] + ":" if len(cpe.get_vendor()) > 0 diff --git a/dojo/tools/detect_secrets/parser.py b/dojo/tools/detect_secrets/parser.py index 30e891ea19..53a00393d2 100644 --- a/dojo/tools/detect_secrets/parser.py +++ b/dojo/tools/detect_secrets/parser.py @@ -26,7 +26,7 @@ def get_findings(self, filename, test): find_date = dateutil.parser.parse(data.get("generated_at")) for detect_file in data.get("results"): for item in data.get("results").get(detect_file): - type = item.get("type") + item_type = item.get("type") file = item.get("filename") hashed_secret = item.get("hashed_secret") is_verified = item.get("is_verified") @@ -34,10 +34,10 @@ def get_findings(self, filename, test): description = "Detected potential secret with the following related data:\n" description += "**Filename:** " + file + "\n" description += "**Line:** " + str(line) + "\n" - description += "**Type:** " + type + "\n" + description += "**Type:** " + item_type + "\n" dupe_key = hashlib.sha256( - (type + file + str(line) + hashed_secret).encode("utf-8"), + (item_type + file + str(line) + hashed_secret).encode("utf-8"), ).hexdigest() if dupe_key in dupes: @@ -45,7 +45,7 @@ def get_findings(self, filename, test): finding.nb_occurences += 1 else: finding = Finding( - title=f"{type}", + title=item_type, test=test, description=description, cwe=798, diff --git a/dojo/tools/govulncheck/parser.py b/dojo/tools/govulncheck/parser.py index 0a3c2b870b..404e80b2a9 100644 --- a/dojo/tools/govulncheck/parser.py +++ b/dojo/tools/govulncheck/parser.py @@ -137,7 +137,7 @@ def get_findings(self, scan_file, test): formatted_ranges = [] summary = osv_data.get("summary", "Unknown") component_name = affected_package["name"] - id = osv_data["id"] + osv_id = osv_data["id"] for r in affected_ranges: events = r["events"] @@ -192,7 +192,7 @@ def get_findings(self, scan_file, test): "references": references, "file_path": path, "url": db_specific_url, - "unique_id_from_tool": id, + "unique_id_from_tool": osv_id, } findings.append(Finding(**d)) diff --git a/dojo/tools/harbor_vulnerability/parser.py b/dojo/tools/harbor_vulnerability/parser.py index 8072357700..d99fab6908 100644 --- a/dojo/tools/harbor_vulnerability/parser.py +++ b/dojo/tools/harbor_vulnerability/parser.py @@ -39,7 +39,7 @@ def get_findings(self, filename, test): return [] for item in vulnerability: - id = 
item.get("id") + item_id = item.get("id") package_name = item.get("package") package_version = item.get("version") description = item.get("description", "No description found") @@ -48,7 +48,7 @@ def get_findings(self, filename, test): links = item.get("links") cwe_ids = item.get("cwe_ids") - title = f"{id} - {package_name} ({package_version})" + title = f"{item_id} - {package_name} ({package_version})" severity = transpose_severity(severity) mitigation = f"Upgrade {package_name} to version {fix_version}" if fix_version else None @@ -62,7 +62,7 @@ def get_findings(self, filename, test): cwe = cwe_ids[0].strip("CWE-") if cwe_ids and cwe_ids[0] != "" else None - vulnerability_id = id if id and id.startswith("CVE") else None + vulnerability_id = item_id if item_id and item_id.startswith("CVE") else None dupe_key = title diff --git a/dojo/tools/hcl_asoc_sast/parser.py b/dojo/tools/hcl_asoc_sast/parser.py index 538cd78583..94cc0c5a6c 100644 --- a/dojo/tools/hcl_asoc_sast/parser.py +++ b/dojo/tools/hcl_asoc_sast/parser.py @@ -95,8 +95,8 @@ def get_findings(self, file, test): fix = item.iter() for fitem in fix: if fitem.tag == "types": - type = fitem.iter() - for titem in type: + ftype = fitem.iter() + for titem in ftype: if titem.tag == "name": issuetypename = self.xmltreehelper(titem) if fitem.tag == "remediation": diff --git a/dojo/tools/ort/parser.py b/dojo/tools/ort/parser.py index d40bd13653..689ffea0e6 100644 --- a/dojo/tools/ort/parser.py +++ b/dojo/tools/ort/parser.py @@ -125,8 +125,8 @@ def get_rule_violation_model( dependency_trees, rule_violation_unresolved["pkg"], ) project_names = [] - for id in project_ids: - project_names.append(get_name_id_for_package(packages, id)) + for proj_id in project_ids: + project_names.append(get_name_id_for_package(packages, proj_id)) package = find_package_by_id(packages, rule_violation_unresolved["pkg"]) license_tmp = rule_violation_unresolved.get("license", "unset") if "license_source" not in rule_violation_unresolved: @@ -148,12 +148,11 @@ def find_package_by_id(packages, pkg_id): def find_license_id(licenses, license_id): - id = "" for lic in licenses: if lic["_id"] == license_id: - id = lic["id"] + lic_id = lic["id"] break - return id + return lic_id def get_item(model, test): diff --git a/dojo/tools/rapplex/parser.py b/dojo/tools/rapplex/parser.py index 6770547dd6..9c974ff80b 100644 --- a/dojo/tools/rapplex/parser.py +++ b/dojo/tools/rapplex/parser.py @@ -51,14 +51,14 @@ def get_findings(self, filename, test): issue_sections = issue_definition.get("Sections", {}) ref = html2text(issue_sections.get("References", "")) rem = issue_sections.get("Remediation", "") - sum = issue_sections.get("Summary", "") + summary = issue_sections.get("Summary", "") finding = Finding( title=title, test=test, severity=severity_level, date=formatted_date, - description=sum, + description=summary, mitigation=rem, cwe=cwe_val, references=ref, diff --git a/dojo/tools/trivy/parser.py b/dojo/tools/trivy/parser.py index 464eadf83d..fbe87bd64c 100644 --- a/dojo/tools/trivy/parser.py +++ b/dojo/tools/trivy/parser.py @@ -195,12 +195,12 @@ def get_result_items(self, test, results, service_name=None, artifact_name=""): references = "\n".join(vuln.get("References", [])) mitigation = vuln.get("FixedVersion", "") cwe = int(vuln["CweIDs"][0].split("-")[1]) if len(vuln.get("CweIDs", [])) > 0 else 0 - type = target_data.get("Type", "") + vul_type = target_data.get("Type", "") title = f"{vuln_id} {package_name} {package_version}" description = DESCRIPTION_TEMPLATE.format( 
title=vuln.get("Title", ""), target=target, - type=type, + type=vul_type, fixed_version=mitigation, description_text=vuln.get("Description", ""), ) @@ -218,7 +218,7 @@ def get_result_items(self, test, results, service_name=None, artifact_name=""): cvssv3=cvssv3, static_finding=True, dynamic_finding=False, - tags=[type, target_class], + tags=[vul_type, target_class], service=service_name, ) @@ -308,14 +308,14 @@ def get_result_items(self, test, results, service_name=None, artifact_name=""): items.append(finding) licenses = target_data.get("Licenses", []) - for license in licenses: - license_severity = license.get("Severity") - license_category = license.get("Category") - license_pkgname = license.get("PkgName") - license_filepath = license.get("FilePath") - license_name = license.get("Name") - license_confidence = license.get("Confidence") - license_link = license.get("Link") + for lic in licenses: + license_severity = lic.get("Severity") + license_category = lic.get("Category") + license_pkgname = lic.get("PkgName") + license_filepath = lic.get("FilePath") + license_name = lic.get("Name") + license_confidence = lic.get("Confidence") + license_link = lic.get("Link") title = f"License detected in {target_target} - {license_name}" description = LICENSE_DESCRIPTION_TEMPLATE.format( diff --git a/dojo/tools/veracode/json_parser.py b/dojo/tools/veracode/json_parser.py index df83cbb802..62df0ffd27 100644 --- a/dojo/tools/veracode/json_parser.py +++ b/dojo/tools/veracode/json_parser.py @@ -261,9 +261,9 @@ def add_sca_details(self, finding, finding_details, backup_title=None) -> Findin if licenses := finding_details.get("licenses", []): # Build the license string license_markdown = "#### Licenses\n" - for license in licenses: - license_name = license.get("license_id") - license_details = self.license_mapping.get(int(license.get("risk_rating", 5))) + for lic in licenses: + license_name = lic.get("license_id") + license_details = self.license_mapping.get(int(lic.get("risk_rating", 5))) license_markdown += f"- {license_name}: {license_details[0]}\n - {license_details[1]}\n" # Do not add any extra text if the there are no licenses here if license_markdown != "#### Licenses\n": diff --git a/dojo/tools/yarn_audit/parser.py b/dojo/tools/yarn_audit/parser.py index 8bc0c8adfd..d18e126488 100644 --- a/dojo/tools/yarn_audit/parser.py +++ b/dojo/tools/yarn_audit/parser.py @@ -91,7 +91,7 @@ def get_items_auditci(self, tree, test): # https://github.com/DefectDojo/django cvss = "**cvss:** " + str(tree.get("advisories").get(element).get("cvss")) found_by = "**found_by:** " + str(tree.get("advisories").get(element).get("found_by")) deleted = "**deleted:** " + str(tree.get("advisories").get(element).get("deleted")) - id = "**id:** " + str(tree.get("advisories").get(element).get("id")) + elem_id = "**id:** " + str(tree.get("advisories").get(element).get("id")) references = "**references:** " + str(tree.get("advisories").get(element).get("references")) created = "**created:** " + str(tree.get("advisories").get(element).get("created")) reported_by = "**reported_by:** " + str(tree.get("advisories").get(element).get("reported_by")) @@ -109,7 +109,7 @@ def get_items_auditci(self, tree, test): # https://github.com/DefectDojo/django description += cvss + "\n" description += found_by + "\n" description += deleted + "\n" - description += id + "\n" + description += elem_id + "\n" description += created + "\n" description += reported_by + "\n" description += title + "\n" diff --git a/dojo/utils.py b/dojo/utils.py index 
da6e370c0f..9ded148c11 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -2372,9 +2372,9 @@ def __init__(self, *args, **kwargs): @dojo_async_task @app.task def delete_chunk(self, objects, **kwargs): - for object in objects: + for obj in objects: try: - object.delete() + obj.delete() except AssertionError: logger.debug("ASYNC_DELETE: object has already been deleted elsewhere. Skipping") # The id must be None diff --git a/dojo/widgets.py b/dojo/widgets.py index 91210094dd..6b6652dfb8 100644 --- a/dojo/widgets.py +++ b/dojo/widgets.py @@ -15,7 +15,7 @@ def __init__(self, *args, **kwargs): def value_from_datadict(self, data, files, name): selected_ids = data.getlist(name) - return [int(id) for id in selected_ids] + return [int(idn) for idn in selected_ids] def render(self, name, value, attrs=None, renderer=None): page_number = self.page_number diff --git a/ruff.toml b/ruff.toml index 6333d345f2..92edcaac8b 100644 --- a/ruff.toml +++ b/ruff.toml @@ -44,7 +44,7 @@ select = [ "ASYNC", "S1", "S2", "S302", "S303", "S304", "S305", "S306", "S307", "S31", "S323", "S401", "S402", "S406", "S407", "S408", "S409", "S41", "S5", "S601", "S602", "S604", "S605", "S606", "S607", "S609", "S610", "S612", "S7", "FBT", - "A003", "A004", "A005", "A006", + "A001", "A003", "A004", "A005", "A006", "COM", "C4", "T10", From ab20c6ea97cf1a4bb8465f234c63867aa518fbb4 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:15:14 +0100 Subject: [PATCH 16/23] Ruff: Add and fix SIM117 (#11653) --- ruff.toml | 1 - .../tools/test_anchore_enterprise_parser.py | 8 +++--- unittests/tools/test_auditjs_parser.py | 8 +++--- unittests/tools/test_aws_inspector2_parser.py | 16 +++++------ unittests/tools/test_burp_graphql_parser.py | 9 +++--- unittests/tools/test_coverity_api_parser.py | 8 +++--- .../tools/test_gitlab_api_fuzzing_parser.py | 8 +++--- unittests/tools/test_govulncheck_parser.py | 8 +++--- unittests/tools/test_intsights_parser.py | 8 +++--- unittests/tools/test_kubehunter_parser.py | 8 +++--- unittests/tools/test_meterian_parser.py | 8 +++--- unittests/tools/test_noseyparker_parser.py | 20 ++++++------- unittests/tools/test_npm_audit_parser.py | 16 +++++------ unittests/tools/test_risk_recon_parser.py | 16 +++++------ unittests/tools/test_sysdig_reports_parser.py | 28 +++++++++---------- .../tools/test_threat_composer_parser.py | 8 +++--- .../tools/test_whitehat_sentinel_parser.py | 16 +++++------ unittests/tools/test_yarn_audit_parser.py | 16 +++++------ 18 files changed, 104 insertions(+), 106 deletions(-) diff --git a/ruff.toml b/ruff.toml index 92edcaac8b..43da6f1952 100644 --- a/ruff.toml +++ b/ruff.toml @@ -92,7 +92,6 @@ ignore = [ "SIM102", "SIM113", "SIM115", - "SIM117", "RUF012", "RUF015", "D205", diff --git a/unittests/tools/test_anchore_enterprise_parser.py b/unittests/tools/test_anchore_enterprise_parser.py index a2ae81304a..3ac1de154b 100644 --- a/unittests/tools/test_anchore_enterprise_parser.py +++ b/unittests/tools/test_anchore_enterprise_parser.py @@ -27,10 +27,10 @@ def test_anchore_policy_check_parser_has_multiple_findings(self): self.assertEqual("CVE-2015-2992", finding.unsaved_vulnerability_ids[0]) def test_anchore_policy_check_parser_invalid_format(self): - with open(get_unit_tests_scans_path("anchore_enterprise") / "invalid_checks_format.json", encoding="utf-8") as testfile: - with self.assertRaises(Exception): - parser = AnchoreEnterpriseParser() - parser.get_findings(testfile, Test()) + with open(get_unit_tests_scans_path("anchore_enterprise") / 
"invalid_checks_format.json", encoding="utf-8") as testfile, \ + self.assertRaises(Exception): + parser = AnchoreEnterpriseParser() + parser.get_findings(testfile, Test()) def test_anchore_policy_check_extract_vulnerability_id(self): vulnerability_id = extract_vulnerability_id("CVE-2019-14540+openapi-generator-cli-4.0.0.jar:jackson-databind") diff --git a/unittests/tools/test_auditjs_parser.py b/unittests/tools/test_auditjs_parser.py index 1df420952c..6acee12401 100644 --- a/unittests/tools/test_auditjs_parser.py +++ b/unittests/tools/test_auditjs_parser.py @@ -58,10 +58,10 @@ def test_auditjs_parser_with_many_vuln_has_many_findings(self): self.assertEqual(400, findings[4].cwe) def test_auditjs_parser_empty_with_error(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("auditjs") / "empty_with_error.json", encoding="utf-8") as testfile: - parser = AuditJSParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("auditjs") / "empty_with_error.json", encoding="utf-8") as testfile: + parser = AuditJSParser() + parser.get_findings(testfile, Test()) self.assertIn( "Invalid JSON format. Are you sure you used --json option ?", str(context.exception), diff --git a/unittests/tools/test_aws_inspector2_parser.py b/unittests/tools/test_aws_inspector2_parser.py index ff47cbd517..36c28a24e0 100644 --- a/unittests/tools/test_aws_inspector2_parser.py +++ b/unittests/tools/test_aws_inspector2_parser.py @@ -37,11 +37,11 @@ def test_aws_inspector2_parser_with_many_vuln_has_many_findings(self): self.assertEqual(8, len(findings)) def test_aws_inspector2_parser_empty_with_error(self): - with self.assertRaises(TypeError) as context: - with open(get_unit_tests_scans_path("aws_inspector2") / "empty_with_error.json", encoding="utf-8") as testfile: - parser = AWSInspector2Parser() - parser.get_findings(testfile, Test()) - testfile.close() - self.assertTrue( - "Incorrect Inspector2 report format" in str(context.exception), - ) + with self.assertRaises(TypeError) as context, \ + open(get_unit_tests_scans_path("aws_inspector2") / "empty_with_error.json", encoding="utf-8") as testfile: + parser = AWSInspector2Parser() + parser.get_findings(testfile, Test()) + testfile.close() + self.assertTrue( + "Incorrect Inspector2 report format" in str(context.exception), + ) diff --git a/unittests/tools/test_burp_graphql_parser.py b/unittests/tools/test_burp_graphql_parser.py index bfa5e057ec..fded6c2897 100644 --- a/unittests/tools/test_burp_graphql_parser.py +++ b/unittests/tools/test_burp_graphql_parser.py @@ -55,11 +55,10 @@ def test_burp_no_findings(self): self.assertEqual(0, len(findings)) def test_burp_null_title(self): - with open(get_unit_tests_scans_path("burp_graphql") / "null_title.json", encoding="utf-8") as test_file: - - with self.assertRaises(ValueError): - parser = BurpGraphQLParser() - parser.get_findings(test_file, Test()) + with open(get_unit_tests_scans_path("burp_graphql") / "null_title.json", encoding="utf-8") as test_file, \ + self.assertRaises(ValueError): + parser = BurpGraphQLParser() + parser.get_findings(test_file, Test()) def test_burp_null_request_segments(self): with open(get_unit_tests_scans_path("burp_graphql") / "null_request_segments.json", encoding="utf-8") as test_file: diff --git a/unittests/tools/test_coverity_api_parser.py b/unittests/tools/test_coverity_api_parser.py index 9be4e0d125..8413cc1d08 100644 --- a/unittests/tools/test_coverity_api_parser.py +++ 
b/unittests/tools/test_coverity_api_parser.py @@ -7,10 +7,10 @@ class TestZapParser(DojoTestCase): def test_parse_wrong_file(self): - with self.assertRaises(ValueError): - with open(get_unit_tests_scans_path("coverity_api") / "wrong.json", encoding="utf-8") as testfile: - parser = CoverityApiParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError), \ + open(get_unit_tests_scans_path("coverity_api") / "wrong.json", encoding="utf-8") as testfile: + parser = CoverityApiParser() + parser.get_findings(testfile, Test()) def test_parse_no_findings(self): with open(get_unit_tests_scans_path("coverity_api") / "empty.json", encoding="utf-8") as testfile: diff --git a/unittests/tools/test_gitlab_api_fuzzing_parser.py b/unittests/tools/test_gitlab_api_fuzzing_parser.py index 376db6f3ea..db722f7ad3 100644 --- a/unittests/tools/test_gitlab_api_fuzzing_parser.py +++ b/unittests/tools/test_gitlab_api_fuzzing_parser.py @@ -44,8 +44,8 @@ def test_gitlab_api_fuzzing_parser_with_one_criticle_vuln_has_one_findings_v15(s ) def test_gitlab_api_fuzzing_parser_with_invalid_json(self): - with open(get_unit_tests_scans_path("gitlab_api_fuzzing") / "gitlab_api_fuzzing_invalid.json", encoding="utf-8") as testfile: + with open(get_unit_tests_scans_path("gitlab_api_fuzzing") / "gitlab_api_fuzzing_invalid.json", encoding="utf-8") as testfile, \ + self.assertRaises((KeyError, ValueError)): # Something is wrong with JSON file - with self.assertRaises((KeyError, ValueError)): - parser = GitlabAPIFuzzingParser() - parser.get_findings(testfile, Test()) + parser = GitlabAPIFuzzingParser() + parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_govulncheck_parser.py b/unittests/tools/test_govulncheck_parser.py index 7d0bdbff26..e6e6ecf04c 100644 --- a/unittests/tools/test_govulncheck_parser.py +++ b/unittests/tools/test_govulncheck_parser.py @@ -6,10 +6,10 @@ class TestGovulncheckParser(DojoTestCase): def test_parse_empty(self): - with self.assertRaises(ValueError) as exp: - with open(get_unit_tests_scans_path("govulncheck") / "empty.json", encoding="utf-8") as testfile: - parser = GovulncheckParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError) as exp, \ + open(get_unit_tests_scans_path("govulncheck") / "empty.json", encoding="utf-8") as testfile: + parser = GovulncheckParser() + parser.get_findings(testfile, Test()) self.assertIn( "Invalid JSON format", str(exp.exception), ) diff --git a/unittests/tools/test_intsights_parser.py b/unittests/tools/test_intsights_parser.py index b2bfb34c04..0b77b9591b 100644 --- a/unittests/tools/test_intsights_parser.py +++ b/unittests/tools/test_intsights_parser.py @@ -54,10 +54,10 @@ def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): self.assertEqual(9, len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self): - with self.assertRaises(ValueError): - with open(get_unit_tests_scans_path("intsights") / "intsights_invalid_file.txt", encoding="utf-8") as testfile: - parser = IntSightsParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError), \ + open(get_unit_tests_scans_path("intsights") / "intsights_invalid_file.txt", encoding="utf-8") as testfile: + parser = IntSightsParser() + parser.get_findings(testfile, Test()) def test_intsights_parser_with_no_alerts_json(self): with open(get_unit_tests_scans_path("intsights") / "intsights_zero_vuln.json", encoding="utf-8") as testfile: diff --git a/unittests/tools/test_kubehunter_parser.py 
b/unittests/tools/test_kubehunter_parser.py index c59a5e74fb..f4e4c50f0c 100644 --- a/unittests/tools/test_kubehunter_parser.py +++ b/unittests/tools/test_kubehunter_parser.py @@ -36,10 +36,10 @@ def test_kubehunter_parser_with_many_vuln_has_many_findings(self): self.assertEqual(8, len(findings)) def test_kubehunter_parser_empty_with_error(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("kubehunter") / "empty.json", encoding="utf-8") as testfile: - parser = KubeHunterParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("kubehunter") / "empty.json", encoding="utf-8") as testfile: + parser = KubeHunterParser() + parser.get_findings(testfile, Test()) self.assertEqual( "Expecting value: line 1 column 1 (char 0)", str(context.exception), diff --git a/unittests/tools/test_meterian_parser.py b/unittests/tools/test_meterian_parser.py index e119dc8df8..4c7ea176dd 100644 --- a/unittests/tools/test_meterian_parser.py +++ b/unittests/tools/test_meterian_parser.py @@ -6,10 +6,10 @@ class TestMeterianParser(DojoTestCase): def test_meterianParser_invalid_security_report_raise_ValueError_exception(self): - with self.assertRaises(ValueError): - with open(get_unit_tests_scans_path("meterian") / "report_invalid.json", encoding="utf-8") as testfile: - parser = MeterianParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError), \ + open(get_unit_tests_scans_path("meterian") / "report_invalid.json", encoding="utf-8") as testfile: + parser = MeterianParser() + parser.get_findings(testfile, Test()) def test_meterianParser_report_has_no_finding(self): with open(get_unit_tests_scans_path("meterian") / "report_no_vulns.json", encoding="utf-8") as testfile: diff --git a/unittests/tools/test_noseyparker_parser.py b/unittests/tools/test_noseyparker_parser.py index c2ff908ef5..425925b657 100644 --- a/unittests/tools/test_noseyparker_parser.py +++ b/unittests/tools/test_noseyparker_parser.py @@ -32,16 +32,16 @@ def test_noseyparker_parser_many_vulns(self): self.assertEqual(3, len(findings)) def test_noseyparker_parser_error(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("noseyparker") / "empty_with_error.json", encoding="utf-8") as testfile: - parser = NoseyParkerParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(0, len(findings)) - self.assertIn( - "Invalid Nosey Parker data, make sure to use Nosey Parker v0.16.0", str(context.exception), - ) - self.assertIn("ECONNREFUSED", str(context.exception)) + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("noseyparker") / "empty_with_error.json", encoding="utf-8") as testfile: + parser = NoseyParkerParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(0, len(findings)) + self.assertIn( + "Invalid Nosey Parker data, make sure to use Nosey Parker v0.16.0", str(context.exception), + ) + self.assertIn("ECONNREFUSED", str(context.exception)) def test_noseyparker_version_0_22_0(self): with open("unittests/scans/noseyparker/noseyparker_0_22_0.jsonl", encoding="utf-8") as testfile: diff --git a/unittests/tools/test_npm_audit_parser.py b/unittests/tools/test_npm_audit_parser.py index 0a76dfbebe..3a7b28aa47 100644 --- a/unittests/tools/test_npm_audit_parser.py +++ b/unittests/tools/test_npm_audit_parser.py @@ -63,19 +63,19 @@ def 
test_npm_audit_parser_with_one_criticle_vuln_has_null_as_cwe(self): self.assertEqual("1.9.2", findings[0].component_version) def test_npm_audit_parser_empty_with_error(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("npm_audit") / "empty_with_error.json", encoding="utf-8") as testfile: - parser = NpmAuditParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("npm_audit") / "empty_with_error.json", encoding="utf-8") as testfile: + parser = NpmAuditParser() + parser.get_findings(testfile, Test()) self.assertIn("npm audit report contains errors:", str(context.exception)) self.assertIn("ENOAUDIT", str(context.exception)) def test_npm_audit_parser_many_vuln_npm7(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("npm_audit") / "many_vuln_npm7.json", encoding="utf-8") as testfile: - parser = NpmAuditParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("npm_audit") / "many_vuln_npm7.json", encoding="utf-8") as testfile: + parser = NpmAuditParser() + parser.get_findings(testfile, Test()) self.assertIn("npm7 with auditReportVersion 2 or higher not yet supported", str(context.exception)) diff --git a/unittests/tools/test_risk_recon_parser.py b/unittests/tools/test_risk_recon_parser.py index f24b249007..2c7d875733 100644 --- a/unittests/tools/test_risk_recon_parser.py +++ b/unittests/tools/test_risk_recon_parser.py @@ -8,16 +8,16 @@ class TestRiskReconAPIParser(DojoTestCase): def test_api_with_bad_url(self): - with open(get_unit_tests_scans_path("risk_recon") / "bad_url.json", encoding="utf-8") as testfile: - with self.assertRaises(Exception): - parser = RiskReconParser() - parser.get_findings(testfile, Test()) + with open(get_unit_tests_scans_path("risk_recon") / "bad_url.json", encoding="utf-8") as testfile, \ + self.assertRaises(Exception): + parser = RiskReconParser() + parser.get_findings(testfile, Test()) def test_api_with_bad_key(self): - with open(get_unit_tests_scans_path("risk_recon") / "bad_key.json", encoding="utf-8") as testfile: - with self.assertRaises(Exception): - parser = RiskReconParser() - parser.get_findings(testfile, Test()) + with open(get_unit_tests_scans_path("risk_recon") / "bad_key.json", encoding="utf-8") as testfile, \ + self.assertRaises(Exception): + parser = RiskReconParser() + parser.get_findings(testfile, Test()) def test_parser_without_api(self): with open(get_unit_tests_scans_path("risk_recon") / "findings.json", encoding="utf-8") as testfile: diff --git a/unittests/tools/test_sysdig_reports_parser.py b/unittests/tools/test_sysdig_reports_parser.py index 5afc7eb243..d4b949a6b6 100644 --- a/unittests/tools/test_sysdig_reports_parser.py +++ b/unittests/tools/test_sysdig_reports_parser.py @@ -33,25 +33,25 @@ def test_sysdig_parser_with_many_vuln_has_many_findings(self): self.assertEqual(50, len(findings)) def test_sysdig_parser_missing_cve_field_id_from_csv_file(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("sysdig_reports") / "sysdig_reports_missing_cve_field.csv", encoding="utf-8") as testfile: - parser = SysdigReportsParser() - findings = parser.get_findings(testfile, Test()) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("sysdig_reports") / 
"sysdig_reports_missing_cve_field.csv", encoding="utf-8") as testfile: + parser = SysdigReportsParser() + findings = parser.get_findings(testfile, Test()) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() self.assertEqual( "Number of fields in row (22) does not match number of headers (21)", str(context.exception), ) def test_sysdig_parser_missing_cve_field_not_starting_with_cve(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("sysdig_reports") / "sysdig_reports_not_starting_with_cve.csv", encoding="utf-8") as testfile: - parser = SysdigReportsParser() - findings = parser.get_findings(testfile, Test()) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("sysdig_reports") / "sysdig_reports_not_starting_with_cve.csv", encoding="utf-8") as testfile: + parser = SysdigReportsParser() + findings = parser.get_findings(testfile, Test()) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() self.assertEqual( "Number of fields in row (22) does not match number of headers (21)", str(context.exception), ) diff --git a/unittests/tools/test_threat_composer_parser.py b/unittests/tools/test_threat_composer_parser.py index 93a7b41314..4b60596504 100644 --- a/unittests/tools/test_threat_composer_parser.py +++ b/unittests/tools/test_threat_composer_parser.py @@ -44,10 +44,10 @@ def test_threat_composer_parser_with_many_threats_has_many_findings(self): self.assertEqual(21, len(findings)) def test_threat_composer_parser_empty_with_error(self): - with self.assertRaises(ValueError) as context: - with open(sample_path("threat_composer_no_threats_with_error.json"), encoding="utf-8") as testfile: - parser = ThreatComposerParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError) as context, \ + open(sample_path("threat_composer_no_threats_with_error.json"), encoding="utf-8") as testfile: + parser = ThreatComposerParser() + parser.get_findings(testfile, Test()) self.assertNotIn("No threats found in the JSON file", str(context.exception)) diff --git a/unittests/tools/test_whitehat_sentinel_parser.py b/unittests/tools/test_whitehat_sentinel_parser.py index fa2c020302..fb73986dd8 100644 --- a/unittests/tools/test_whitehat_sentinel_parser.py +++ b/unittests/tools/test_whitehat_sentinel_parser.py @@ -6,10 +6,10 @@ class TestWhiteHatSentinelParser(DojoTestCase): def test_parse_file_with_no_vuln_has_no_findings(self): - with self.assertRaises(ValueError): - with open(get_unit_tests_scans_path("whitehat_sentinel") / "empty_file.json", encoding="utf-8") as testfile: - parser = WhiteHatSentinelParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError), \ + open(get_unit_tests_scans_path("whitehat_sentinel") / "empty_file.json", encoding="utf-8") as testfile: + parser = WhiteHatSentinelParser() + parser.get_findings(testfile, Test()) def test_parse_file_with_one_vuln_has_one_findings(self): with open(get_unit_tests_scans_path("whitehat_sentinel") / "one_vuln.json", encoding="utf-8") as testfile: @@ -24,7 +24,7 @@ def test_parse_file_with_multiple_vuln_has_multiple_finding(self): self.assertEqual(3, len(findings)) def test_parse_file_with_invalid_data(self): - with self.assertRaises(ValueError): - with open(get_unit_tests_scans_path("whitehat_sentinel") / "invalid_data.txt", encoding="utf-8") as testfile: - parser = 
WhiteHatSentinelParser() - parser.get_findings(testfile, Test()) + with self.assertRaises(ValueError), \ + open(get_unit_tests_scans_path("whitehat_sentinel") / "invalid_data.txt", encoding="utf-8") as testfile: + parser = WhiteHatSentinelParser() + parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_yarn_audit_parser.py b/unittests/tools/test_yarn_audit_parser.py index 428b4ac1c5..0cdaca1dc6 100644 --- a/unittests/tools/test_yarn_audit_parser.py +++ b/unittests/tools/test_yarn_audit_parser.py @@ -63,14 +63,14 @@ def test_yarn_audit_parser_with_multiple_cwes_per_finding_list(self): self.assertEqual(findings[1].unsaved_vulnerability_ids[0], "CVE-2021-3807") def test_yarn_audit_parser_empty_with_error(self): - with self.assertRaises(ValueError) as context: - with open(get_unit_tests_scans_path("yarn_audit") / "empty_with_error.json", encoding="utf-8") as testfile: - parser = YarnAuditParser() - parser.get_findings(testfile, self.get_test()) - self.assertIn( - "yarn audit report contains errors:", str(context.exception), - ) - self.assertIn("ECONNREFUSED", str(context.exception)) + with self.assertRaises(ValueError) as context, \ + open(get_unit_tests_scans_path("yarn_audit") / "empty_with_error.json", encoding="utf-8") as testfile: + parser = YarnAuditParser() + parser.get_findings(testfile, self.get_test()) + self.assertIn( + "yarn audit report contains errors:", str(context.exception), + ) + self.assertIn("ECONNREFUSED", str(context.exception)) def test_yarn_audit_parser_issue_6495(self): with open(get_unit_tests_scans_path("yarn_audit") / "issue_6495.json", encoding="utf-8") as testfile: From efe7a565b4ad56c3608ff83edacb20979f213566 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:16:22 +0100 Subject: [PATCH 17/23] Ruff: Add and fix SIM113 (#11652) --- dojo/finding/helper.py | 6 ++---- dojo/reports/views.py | 16 +++------------- dojo/tools/openvas/csv_parser.py | 13 +++---------- dojo/tools/sarif/parser.py | 11 +++-------- dojo/tools/skf/parser.py | 14 +++----------- ruff.toml | 1 - 6 files changed, 14 insertions(+), 47 deletions(-) diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index 13ae9a7544..bad13fe061 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -516,11 +516,9 @@ def prepare_duplicates_for_delete(test=None, engagement=None): # remove the link to the original from the duplicates inside the cluster so they can be safely deleted by the django framework total = len(originals) - i = 0 # logger.debug('originals: %s', [original.id for original in originals]) - for original in originals: - i += 1 - logger.debug("%d/%d: preparing duplicate cluster for deletion of original: %d", i, total, original.id) + for i, original in enumerate(originals): + logger.debug("%d/%d: preparing duplicate cluster for deletion of original: %d", i + 1, total, original.id) cluster_inside = original.original_finding.all() if engagement: cluster_inside = cluster_inside.filter(test__engagement=engagement) diff --git a/dojo/reports/views.py b/dojo/reports/views.py index 09f26538b4..a058ffd28f 100644 --- a/dojo/reports/views.py +++ b/dojo/reports/views.py @@ -880,9 +880,7 @@ def get(self, request): fields.append(finding.test.engagement.product.name) endpoint_value = "" - num_endpoints = 0 for endpoint in finding.endpoints.all(): - num_endpoints += 1 endpoint_value += f"{endpoint}; " endpoint_value = endpoint_value.removesuffix("; ") if len(endpoint_value) > EXCEL_CHAR_LIMIT: @@ -890,9 +888,7 @@ def get(self, 
request): fields.append(endpoint_value) vulnerability_ids_value = "" - num_vulnerability_ids = 0 - for vulnerability_id in finding.vulnerability_ids: - num_vulnerability_ids += 1 + for num_vulnerability_ids, vulnerability_id in enumerate(finding.vulnerability_ids): if num_vulnerability_ids > 5: vulnerability_ids_value += "..." break @@ -903,9 +899,7 @@ def get(self, request): fields.append(vulnerability_ids_value) # Tags tags_value = "" - num_tags = 0 - for tag in finding.tags.all(): - num_tags += 1 + for num_tags, tag in enumerate(finding.tags.all()): if num_tags > 5: tags_value += "..." break @@ -1029,9 +1023,7 @@ def get(self, request): col_num += 1 endpoint_value = "" - num_endpoints = 0 for endpoint in finding.endpoints.all(): - num_endpoints += 1 endpoint_value += f"{endpoint}; \n" endpoint_value = endpoint_value.removesuffix("; \n") if len(endpoint_value) > EXCEL_CHAR_LIMIT: @@ -1040,9 +1032,7 @@ def get(self, request): col_num += 1 vulnerability_ids_value = "" - num_vulnerability_ids = 0 - for vulnerability_id in finding.vulnerability_ids: - num_vulnerability_ids += 1 + for num_vulnerability_ids, vulnerability_id in enumerate(finding.vulnerability_ids): if num_vulnerability_ids > 5: vulnerability_ids_value += "..." break diff --git a/dojo/tools/openvas/csv_parser.py b/dojo/tools/openvas/csv_parser.py index c93a411bc9..6de396aec5 100644 --- a/dojo/tools/openvas/csv_parser.py +++ b/dojo/tools/openvas/csv_parser.py @@ -262,10 +262,8 @@ def create_chain(self): def read_column_names(self, row): column_names = {} - index = 0 - for column in row: + for index, column in enumerate(row): column_names[index] = column - index += 1 return column_names def get_findings(self, filename, test): @@ -276,21 +274,17 @@ def get_findings(self, filename, test): if isinstance(content, bytes): content = content.decode("utf-8") reader = csv.reader(io.StringIO(content), delimiter=",", quotechar='"') - row_number = 0 - for row in reader: + for row_number, row in enumerate(reader): finding = Finding(test=test) finding.unsaved_vulnerability_ids = [] finding.unsaved_endpoints = [Endpoint()] if row_number == 0: column_names = self.read_column_names(row) - row_number += 1 continue - column_number = 0 - for column in row: + for column_number, column in enumerate(row): chain.process_column( column_names[column_number], column, finding, ) - column_number += 1 if finding is not None and row_number > 0: if finding.title is None: finding.title = "" @@ -309,5 +303,4 @@ def get_findings(self, filename, test): ).hexdigest() if key not in dupes: dupes[key] = finding - row_number += 1 return list(dupes.values()) diff --git a/dojo/tools/sarif/parser.py b/dojo/tools/sarif/parser.py index 4c53958356..829b1a1bff 100644 --- a/dojo/tools/sarif/parser.py +++ b/dojo/tools/sarif/parser.py @@ -126,10 +126,8 @@ def get_result_cwes_properties(result): def get_artifacts(run): artifacts = {} - custom_index = 0 # hack because some tool doesn't generate this attribute - for tree_artifact in run.get("artifacts", []): + for custom_index, tree_artifact in enumerate(run.get("artifacts", [])): artifacts[tree_artifact.get("index", custom_index)] = tree_artifact - custom_index += 1 return artifacts @@ -229,9 +227,8 @@ def get_codeFlowsDescription(codeFlows): continue description = f"**{_('Code flow')}:**\n" - line = 1 - for location in threadFlow.get("locations", []): + for line, location in enumerate(threadFlow.get("locations", [])): physicalLocation = location.get("location", {}).get("physicalLocation", {}) region = 
physicalLocation.get("region", {}) uri = physicalLocation.get("artifactLocation").get("uri") @@ -249,7 +246,7 @@ def get_codeFlowsDescription(codeFlows): if "snippet" in region: snippet = f"\t-\t{region.get('snippet').get('text')}" - description += f"{line}. {uri}{start_line}{start_column}{snippet}\n" + description += f"{line + 1}. {uri}{start_line}{start_column}{snippet}\n" if "message" in location.get("location", {}): message_field = location.get("location", {}).get("message", {}) @@ -260,8 +257,6 @@ def get_codeFlowsDescription(codeFlows): description += f"\t{message}\n" - line += 1 - return description diff --git a/dojo/tools/skf/parser.py b/dojo/tools/skf/parser.py index 887716c509..14abaf688b 100644 --- a/dojo/tools/skf/parser.py +++ b/dojo/tools/skf/parser.py @@ -86,10 +86,8 @@ def create_chain(self): return date_column_strategy def read_column_names(self, column_names, row): - index = 0 - for column in row: + for index, column in enumerate(row): column_names[index] = column - index += 1 def get_findings(self, filename, test): content = filename.read() @@ -99,26 +97,22 @@ def get_findings(self, filename, test): column_names = {} chain = self.create_chain() - row_number = 0 reader = csv.reader( io.StringIO(content), delimiter=",", quotechar='"', escapechar="\\", ) dupes = {} - for row in reader: + for row_number, row in enumerate(reader): finding = Finding(test=test) finding.severity = "Info" if row_number == 0: self.read_column_names(column_names, row) - row_number += 1 continue - column_number = 0 - for column in row: + for column_number, column in enumerate(row): chain.process_column( column_names[column_number], column, finding, ) - column_number += 1 if finding is not None: key = hashlib.sha256( @@ -134,6 +128,4 @@ def get_findings(self, filename, test): if key not in dupes: dupes[key] = finding - row_number += 1 - return list(dupes.values()) diff --git a/ruff.toml b/ruff.toml index 43da6f1952..321246254e 100644 --- a/ruff.toml +++ b/ruff.toml @@ -90,7 +90,6 @@ ignore = [ "E501", "E722", "SIM102", - "SIM113", "SIM115", "RUF012", "RUF015", From 2212363941d0b65bc3549f16228ba95b6f358ab4 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:17:15 +0100 Subject: [PATCH 18/23] Ruff: Add and fix D414 (#11655) --- dojo/tools/qualys/csv_parser.py | 2 -- dojo/tools/whitehat_sentinel/parser.py | 2 -- ruff.toml | 2 +- 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/dojo/tools/qualys/csv_parser.py b/dojo/tools/qualys/csv_parser.py index 450d74f610..f39eb8d0a3 100644 --- a/dojo/tools/qualys/csv_parser.py +++ b/dojo/tools/qualys/csv_parser.py @@ -37,8 +37,6 @@ def get_report_findings(csv_reader) -> [dict]: Args: csv_reader: - Returns: - """ report_findings = [] diff --git a/dojo/tools/whitehat_sentinel/parser.py b/dojo/tools/whitehat_sentinel/parser.py index 9ed3007824..41946ff406 100644 --- a/dojo/tools/whitehat_sentinel/parser.py +++ b/dojo/tools/whitehat_sentinel/parser.py @@ -130,8 +130,6 @@ def _parse_solution(self, whitehat_sentinel_vuln_solution): Args: whitehat_sentinel_vuln_solution: - Returns: - """ solution_html = whitehat_sentinel_vuln_solution["solution"] diff --git a/ruff.toml b/ruff.toml index 321246254e..4eed27e06f 100644 --- a/ruff.toml +++ b/ruff.toml @@ -38,7 +38,7 @@ select = [ "C90", "I", "N804", "N811", "N814", "N818", - "D2", "D3", "D402", "D403", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D412", "D413", "D416", + "D2", "D3", "D402", "D403", "D405", "D406", "D407", "D408", "D409", "D410", 
"D411", "D412", "D413", "D414", "D416", "UP", "YTT", "ASYNC", From e179fb65c03aec7b19009ba2c80d577511eedb4c Mon Sep 17 00:00:00 2001 From: valentijnscholten Date: Thu, 6 Feb 2025 18:17:52 +0100 Subject: [PATCH 19/23] NodeJS: Correct Debian bullseye reference (#11670) --- Dockerfile.nginx-debian | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile.nginx-debian b/Dockerfile.nginx-debian index 978e74dfb4..7297695b57 100644 --- a/Dockerfile.nginx-debian +++ b/Dockerfile.nginx-debian @@ -42,8 +42,8 @@ RUN \ apt-get -y update && \ apt-get -y install --no-install-recommends apt-transport-https ca-certificates curl wget gnupg && \ curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add --no-tty - && \ - echo 'deb https://deb.nodesource.com/node_20.x bullseye main' > /etc/apt/sources.list.d/nodesource.list && \ - echo 'deb-src https://deb.nodesource.com/node_20.x bullseye main' >> /etc/apt/sources.list.d/nodesource.list && \ + echo 'deb https://deb.nodesource.com/node_20.x bookworm main' > /etc/apt/sources.list.d/nodesource.list && \ + echo 'deb-src https://deb.nodesource.com/node_20.x bookworm main' >> /etc/apt/sources.list.d/nodesource.list && \ apt-get update -y -o Dir::Etc::sourcelist="sources.list.d/nodesource.list" \ -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0" && \ curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \ From 5d2a00ee407bbe19fd6f76537e09712a9c0b7f7d Mon Sep 17 00:00:00 2001 From: Jaja <16231488+littlesvensson@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:27:35 +0100 Subject: [PATCH 20/23] Adjusted number fields in tables (#11677) * Adjusted number fields in tables * Removed text-right from th not necessary and its overridden anyway * Fixed no empty new line at the end of file --- dojo/static/dojo/css/dojo.css | 9 +- dojo/templates/dojo/components.html | 13 +- dojo/templates/dojo/endpoint_pdf_report.html | 2 +- .../templates/dojo/engagement_pdf_report.html | 2 +- dojo/templates/dojo/finding_pdf_report.html | 2 +- .../templates/dojo/findings_list_snippet.html | 8 +- dojo/templates/dojo/metrics.html | 148 +++++++++--------- dojo/templates/dojo/product.html | 8 +- .../dojo/product_endpoint_pdf_report.html | 2 +- dojo/templates/dojo/product_pdf_report.html | 2 +- dojo/templates/dojo/product_type.html | 4 +- .../dojo/product_type_pdf_report.html | 2 +- dojo/templates/dojo/pt_counts.html | 60 +++---- dojo/templates/dojo/simple_metrics.html | 16 +- dojo/templates/dojo/test_pdf_report.html | 2 +- dojo/templates/dojo/view_finding.html | 10 +- dojo/templates/dojo/view_test.html | 2 +- 17 files changed, 150 insertions(+), 142 deletions(-) diff --git a/dojo/static/dojo/css/dojo.css b/dojo/static/dojo/css/dojo.css index 54c15b8b41..27eb88bd4b 100644 --- a/dojo/static/dojo/css/dojo.css +++ b/dojo/static/dojo/css/dojo.css @@ -1854,4 +1854,11 @@ input[type=number]::-webkit-inner-spin-button, input[type=number]::-webkit-outer-spin-button { -webkit-appearance: none; margin: 0; -} \ No newline at end of file +} + +.table th, .table td { + border-right: 1px solid #ddd; +} +.table th:last-child, .table td:last-child { + border-right: none; +} diff --git a/dojo/templates/dojo/components.html b/dojo/templates/dojo/components.html index 5220a8d9fb..e5a1261e26 100644 --- a/dojo/templates/dojo/components.html +++ b/dojo/templates/dojo/components.html @@ -4,6 +4,7 @@ {% load static %} {% block content %} {{ block.super }} +
@@ -28,9 +29,9 @@

Name Version - Active - Duplicate - Total + Active + Duplicate + Total @@ -45,7 +46,7 @@

{{result.component_version}} - + {% if result.active and result.component_name == none %} {{ result.active }} {% elif result.active%} @@ -54,7 +55,7 @@

0 {% endif %} - + {% if result.duplicate and result.component_name == none %} {{ result.duplicate }} {% elif result.duplicate %} @@ -63,7 +64,7 @@

0 {% endif %} - + {% if result.total and result.component_name == none %} {{ result.total }} {% elif result.total %} diff --git a/dojo/templates/dojo/endpoint_pdf_report.html b/dojo/templates/dojo/endpoint_pdf_report.html index 10cf2804c9..637527d39c 100644 --- a/dojo/templates/dojo/endpoint_pdf_report.html +++ b/dojo/templates/dojo/endpoint_pdf_report.html @@ -138,7 +138,7 @@
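The report template hunks below right-align the combined EPSS score/percentile cell. Those values are rendered through the project's format_epss template filter; as background on the mechanism, a custom Django filter of that shape is registered roughly as in this sketch. It is illustrative only: DefectDojo's actual implementation, including its None handling and output format, may differ.

    from django import template

    register = template.Library()


    @register.filter
    def format_epss(value):
        # EPSS scores and percentiles are stored as fractions between 0
        # and 1. Rendering as a two-decimal percentage is an assumption,
        # mirroring the multiply:100|floatformat:"2" chain that
        # view_finding.html uses further down in this commit.
        if value is None:
            return ""
        return f"{value * 100:.2f}%"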

{% endif %} - + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} diff --git a/dojo/templates/dojo/engagement_pdf_report.html b/dojo/templates/dojo/engagement_pdf_report.html index ff34127435..a07c19c449 100644 --- a/dojo/templates/dojo/engagement_pdf_report.html +++ b/dojo/templates/dojo/engagement_pdf_report.html @@ -271,7 +271,7 @@
{% endif %} - + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} diff --git a/dojo/templates/dojo/finding_pdf_report.html b/dojo/templates/dojo/finding_pdf_report.html index ea051d43d0..79235b1e07 100644 --- a/dojo/templates/dojo/finding_pdf_report.html +++ b/dojo/templates/dojo/finding_pdf_report.html @@ -117,7 +117,7 @@
{% endif %} - + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} diff --git a/dojo/templates/dojo/findings_list_snippet.html b/dojo/templates/dojo/findings_list_snippet.html index 9f7ad39593..3cea8bfffc 100644 --- a/dojo/templates/dojo/findings_list_snippet.html +++ b/dojo/templates/dojo/findings_list_snippet.html @@ -612,10 +612,10 @@

{% endif %} {% endwith %} - + {{ finding.epss_score|format_epss }} - + {{ finding.epss_percentile|format_epss }} {% if filter_name == 'Closed' %} @@ -626,11 +626,11 @@

{{ finding.date }} {% endif %} - + {{ finding.age }} {% if system_settings.enable_finding_sla %} - + {{ finding|finding_sla }} {% endif %} diff --git a/dojo/templates/dojo/metrics.html b/dojo/templates/dojo/metrics.html index 759c58f334..48b0603bf1 100644 --- a/dojo/templates/dojo/metrics.html +++ b/dojo/templates/dojo/metrics.html @@ -319,11 +319,11 @@

{% trans "Metric Counts" %}

{{ t.name }} - {{ t.critical|default_if_none:0 }} - {{ t.high|default_if_none:0 }} - {{ t.medium|default_if_none:0 }} - {{ t.low|default_if_none:0 }} - {{ t.total|default_if_none:0 }} + {{ t.critical|default_if_none:0 }} + {{ t.high|default_if_none:0 }} + {{ t.medium|default_if_none:0 }} + {{ t.low|default_if_none:0 }} + {{ t.total|default_if_none:0 }} {% endfor %} @@ -356,7 +356,7 @@

{% trans "Metric Counts" %}

{{ finding.severity_display }} - + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} @@ -364,7 +364,7 @@

{% trans "Metric Counts" %}

{{ finding.title|truncatechars_html:20 }} - {{ finding.age }} + {{ finding.age }} {{ finding.status }} {{ finding.reporter }} @@ -385,12 +385,12 @@

{% trans "Opened During Period" %}

{% trans "Total" %} - {{ in_period_counts.critical }} - {{ in_period_counts.high }} - {{ in_period_counts.medium }} - {{ in_period_counts.low }} - {{ in_period_counts.info }} - {{ in_period_counts.total }} + {{ in_period_counts.critical }} + {{ in_period_counts.high }} + {{ in_period_counts.medium }} + {{ in_period_counts.low }} + {{ in_period_counts.info }} + {{ in_period_counts.total }} {% trans "Opened During Period" %} {{ product.product_name }} - - - - - - + + + + + + {% endfor %}
{{ product.critical }}{{ product.high }}{{ product.medium }}{{ product.low }}{{ product.info }}{{ product.total }}{{ product.critical }}{{ product.high }}{{ product.medium }}{{ product.low }}{{ product.info }}{{ product.total }}
@@ -436,12 +436,12 @@

{% trans "Accepted in Period" %}

{% trans "Total" %} - {{ accepted_in_period_counts.critical|default_if_none:0 }} - {{ accepted_in_period_counts.high|default_if_none:0 }} - {{ accepted_in_period_counts.medium|default_if_none:0 }} - {{ accepted_in_period_counts.low|default_if_none:0 }} - {{ accepted_in_period_counts.info|default_if_none:0 }} - {{ accepted_in_period_counts.total|default_if_none:0 }} + {{ accepted_in_period_counts.critical|default_if_none:0 }} + {{ accepted_in_period_counts.high|default_if_none:0 }} + {{ accepted_in_period_counts.medium|default_if_none:0 }} + {{ accepted_in_period_counts.low|default_if_none:0 }} + {{ accepted_in_period_counts.info|default_if_none:0 }} + {{ accepted_in_period_counts.total|default_if_none:0 }} {% trans "Accepted in Period" %} {{ product.product_name }} - - - - - - + + + + + + {% endfor %}
{{ product.critical }}{{ product.high }}{{ product.medium }}{{ product.low }}{{ product.info }}{{ product.total }}{{ product.critical }}{{ product.high }}{{ product.medium }}{{ product.low }}{{ product.info }}{{ product.total }}
@@ -489,12 +489,12 @@

{% trans "Closed in Period" %}

{% trans "Total" %} - {{ closed_in_period_counts.critical }} - {{ closed_in_period_counts.high }} - {{ closed_in_period_counts.medium }} - {{ closed_in_period_counts.low }} - {{ closed_in_period_counts.info }} - {{ closed_in_period_counts.total }} + {{ closed_in_period_counts.critical }} + {{ closed_in_period_counts.high }} + {{ closed_in_period_counts.medium }} + {{ closed_in_period_counts.low }} + {{ closed_in_period_counts.info }} + {{ closed_in_period_counts.total }} {% trans "Closed in Period" %} {{ product.product_name }} - - - - - - + + + + + + {% endfor %}
{{ product.critical }}{{ product.high }}{{ product.medium }}{{ product.low }}{{ product.info }}{{ product.total }}{{ product.critical }}{{ product.high }}{{ product.medium }}{{ product.low }}{{ product.info }}{{ product.total }}
@@ -543,13 +543,13 @@

{% trans "Closed in Period" %}

{% for week in opened_per_week %} {{ week.grouped_date|date:"m-d-Y" }} - {{ week.critical }} - {{ week.high }} - {{ week.medium }} - {{ week.low }} - {{ week.info }} - {{ week.total }} - {{ week.closed }} + {{ week.critical }} + {{ week.high }} + {{ week.medium }} + {{ week.low }} + {{ week.info }} + {{ week.total }} + {{ week.closed }} {% endfor %} @@ -568,14 +568,14 @@

{% trans "Closed in Period" %}

{% for month in opened_per_month %} - {{ month.grouped_date|date:"m-Y" }} - {{ month.critical }} - {{ month.high }} - {{ month.medium }} - {{ month.low }} - {{ month.info }} - {{ month.total }} - {{ month.closed }} + {{ month.grouped_date|date:"m-Y" }} + {{ month.critical }} + {{ month.high }} + {{ month.medium }} + {{ month.low }} + {{ month.info }} + {{ month.total }} + {{ month.closed }} {% endfor %} @@ -597,12 +597,12 @@

{% trans "Closed in Period" %}

{% for week in accepted_per_week %} {{ week.grouped_date|date:"m-d-Y" }} - {{ week.critical }} - {{ week.high }} - {{ week.medium }} - {{ week.low }} - {{ week.info }} - {{ week.total }} + {{ week.critical }} + {{ week.high }} + {{ week.medium }} + {{ week.low }} + {{ week.info }} + {{ week.total }} {% endfor %} @@ -621,12 +621,12 @@

{% trans "Closed in Period" %}

{% for month in accepted_per_month %} {{ month.grouped_date|date:"m-Y" }} - {{ month.critical }} - {{ month.high }} - {{ month.medium }} - {{ month.low }} - {{ month.info }} - {{ month.total }} + {{ month.critical }} + {{ month.high }} + {{ month.medium }} + {{ month.low }} + {{ month.info }} + {{ month.total }} {% endfor %} @@ -641,19 +641,19 @@

{% trans "Closed in Period" %}

{% trans "0 - 30 Days" %} - {{ age_detail.age_under_30 }} + {{ age_detail.age_under_30 }} {% trans "31 - 60 Days" %} - {{ age_detail.age_31_60 }} + {{ age_detail.age_31_60 }} {% trans "61 - 90 Days" %} - {{ age_detail.age_61_90 }} + {{ age_detail.age_61_90 }} {% trans "91+ Days" %} - {{ age_detail.age_90_plus }} + {{ age_detail.age_90_plus }}
diff --git a/dojo/templates/dojo/product.html b/dojo/templates/dojo/product.html index decf697db8..202cc7ed98 100644 --- a/dojo/templates/dojo/product.html +++ b/dojo/templates/dojo/product.html @@ -62,8 +62,8 @@

{% if system_settings.enable_github %} GitHub {% endif %} - {% dojo_sort request 'Active (Verified) Findings' 'findings_count' %} - Vulnerable Hosts / Endpoints + {% dojo_sort request 'Active (Verified) Findings' 'findings_count' %} + Vulnerable Hosts / Endpoints Contact {% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} {% dojo_sort request 'Product Type' 'prod_type__name' %} @@ -241,7 +241,7 @@

{% endif %} - + {% if prod.findings_count %} {{ prod.findings_count }}  ({{ prod.findings_active_verified_count }}) @@ -249,7 +249,7 @@

0 {% endif %} - + {{ prod.endpoint_host_count }} / {{ prod.endpoint_count }} diff --git a/dojo/templates/dojo/product_endpoint_pdf_report.html b/dojo/templates/dojo/product_endpoint_pdf_report.html index 0a1cb6e523..bb26f83562 100644 --- a/dojo/templates/dojo/product_endpoint_pdf_report.html +++ b/dojo/templates/dojo/product_endpoint_pdf_report.html @@ -186,7 +186,7 @@

{% endif %} - + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} diff --git a/dojo/templates/dojo/product_pdf_report.html b/dojo/templates/dojo/product_pdf_report.html index 2548050275..4fec57dee5 100644 --- a/dojo/templates/dojo/product_pdf_report.html +++ b/dojo/templates/dojo/product_pdf_report.html @@ -243,7 +243,7 @@
{% endif %} - + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} diff --git a/dojo/templates/dojo/product_type.html b/dojo/templates/dojo/product_type.html index 221b7c811e..0b9a58b656 100644 --- a/dojo/templates/dojo/product_type.html +++ b/dojo/templates/dojo/product_type.html @@ -101,8 +101,8 @@

{{ pt.name }} - {{ pt.prod_count }} - + {{ pt.prod_count }} + {{ pt.active_findings_count }}  ({{ pt.active_verified_findings_count }}) diff --git a/dojo/templates/dojo/product_type_pdf_report.html b/dojo/templates/dojo/product_type_pdf_report.html index ec550d1524..fba8bf63e2 100644 --- a/dojo/templates/dojo/product_type_pdf_report.html +++ b/dojo/templates/dojo/product_type_pdf_report.html @@ -172,7 +172,7 @@

{% endif %} - + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} diff --git a/dojo/templates/dojo/pt_counts.html b/dojo/templates/dojo/pt_counts.html index 9b50cb08b8..4a418e8543 100644 --- a/dojo/templates/dojo/pt_counts.html +++ b/dojo/templates/dojo/pt_counts.html @@ -42,19 +42,19 @@

{% trans "Total Security Bug Count In Period" %}

- + {{ overall_in_pt.S0|default_if_none:0 }} - + {{ overall_in_pt.S1|default_if_none:0 }} - + {{ overall_in_pt.S2|default_if_none:0 }} - + {{ overall_in_pt.S3|default_if_none:0 }} - + {{ overall_in_pt.Total|default_if_none:0 }} @@ -79,19 +79,19 @@

{% trans "Total Security Bugs Opened In Period" %}

- + {{ opened_in_period.S0|default_if_none:0 }} - + {{ opened_in_period.S1|default_if_none:0 }} - + {{ opened_in_period.S2|default_if_none:0 }} - + {{ opened_in_period.S3|default_if_none:0 }} - + {{ opened_in_period.Total|default_if_none:0 }} @@ -115,19 +115,19 @@

{% trans "Total Security Bugs Closed In Period" %}

- + {{ closed_in_period.S0|default_if_none:0 }} - + {{ closed_in_period.S1|default_if_none:0 }} - + {{ closed_in_period.S2|default_if_none:0 }} - + {{ closed_in_period.S3|default_if_none:0 }} - + {{ closed_in_period.Total|default_if_none:0 }} @@ -162,25 +162,25 @@

{% trans "Trending Total Bug Count By Month" %}

{% for to in trending_opened %} {{ to.start_date.date|date:"M-Y" }} - + {{ to.S0|default_if_none:0 }} - + {{ to.S1|default_if_none:0 }} - + {{ to.S2|default_if_none:0 }} - + {{ to.S3|default_if_none:0 }} - + {{ to.Total|default_if_none:0 }} - + {{ to.to_date_total|default_if_none:0 }} - + {{ to.closed|default_if_none:0 }} @@ -211,19 +211,19 @@

{% trans "Top 10 By Bug Severity" %}

{{ p.name }} - + {{ p.critical|default_if_none:0 }} - + {{ p.high|default_if_none:0 }} - + {{ p.medium|default_if_none:0 }} - + {{ p.low|default_if_none:0 }} - + {{ p.total|default_if_none:0 }} @@ -252,7 +252,7 @@

{% blocktrans %}{{ pt }} Open Findings{% endblocktrans %}

{% for finding in all_current_in_pt %} - {{ forloop.counter }} + {{ forloop.counter }} {{ finding.title }} @@ -260,12 +260,12 @@

{% blocktrans %}{{ pt }} Open Findings{% endblocktrans %}

{% if finding.severity == "Critical" or finding.severity == "High" %}

{% else %}

{% endif %}{{ finding.severity_display }}

- + {{ finding.epss_score|format_epss }} / {{ finding.epss_percentile|format_epss }} - {{ finding.age }} + {{ finding.age }} {{ finding.test.engagement.product }} diff --git a/dojo/templates/dojo/simple_metrics.html b/dojo/templates/dojo/simple_metrics.html index c84916fe45..b805ba2341 100644 --- a/dojo/templates/dojo/simple_metrics.html +++ b/dojo/templates/dojo/simple_metrics.html @@ -32,28 +32,28 @@

{{ key.name }}

[ - + {{ value.Total }} - + {{ value.S0 }} - + {{ value.S1 }} - + {{ value.S2 }} - + {{ value.S3 }} - + {{ value.S4 }} - + {{ value.Opened }} - + {{ value.Closed }} diff --git a/dojo/templates/dojo/test_pdf_report.html b/dojo/templates/dojo/test_pdf_report.html index b1d1cd7b5f..274ca46767 100644 --- a/dojo/templates/dojo/test_pdf_report.html +++ b/dojo/templates/dojo/test_pdf_report.html @@ -69,7 +69,7 @@
{{ test.target_end|date }} - + {{ test.percent_complete }}% diff --git a/dojo/templates/dojo/view_finding.html b/dojo/templates/dojo/view_finding.html index 892da63370..ba973b1ba3 100755 --- a/dojo/templates/dojo/view_finding.html +++ b/dojo/templates/dojo/view_finding.html @@ -419,7 +419,7 @@

{% endif %} - + {% with finding|first_vulnerability_id as first_vulnerability_id %} {% if first_vulnerability_id %} {% if first_vulnerability_id|has_vulnerability_url %} @@ -433,11 +433,11 @@

{% if finding.epss_score != None or finding.epss_percentile != None %} {% if finding.epss_score != None and finding.epss_percentile != None %} - {{ finding.epss_score|multiply:100|floatformat:"2" }}% / {{ finding.epss_percentile|multiply:100|floatformat:"2" }}% + {{ finding.epss_score|multiply:100|floatformat:"2" }}% / {{ finding.epss_percentile|multiply:100|floatformat:"2" }}% {% elif finding.epss_score != None and finding.epss_percentile == None %} - {{ finding.epss_score|multiply:100|floatformat:"2" }}% + {{ finding.epss_score|multiply:100|floatformat:"2" }}% {% elif finding.epss_score == None and finding.epss_percentile != None %} - {{ finding.epss_percentile|multiply:100|floatformat:"2" }}% + {{ finding.epss_percentile|multiply:100|floatformat:"2" }}% {% endif %} {% endif %} @@ -467,7 +467,7 @@

Additional Vulnerability Ids - + {% for vulnerability_id in additional_vulnerability_ids %} {% if vulnerability_id|has_vulnerability_url%} diff --git a/dojo/templates/dojo/view_test.html b/dojo/templates/dojo/view_test.html index ffcfacef60..34197181f6 100644 --- a/dojo/templates/dojo/view_test.html +++ b/dojo/templates/dojo/view_test.html @@ -160,7 +160,7 @@

{% endif %} {{ test.version }} {% if 'TRACK_IMPORT_HISTORY'|setting_enabled and test.total_reimport_count %} - {{ test.total_reimport_count }} + {{ test.total_reimport_count }} {% endif %} {% if test.api_scan_configuration %} From a43cc06ededf68a8c5c92b2ba2d7e13386d28892 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:29:51 +0100 Subject: [PATCH 21/23] Ruff: Add and fix ISC002 (#11705) --- dojo/endpoint/utils.py | 30 +++++++---- dojo/forms.py | 10 ++-- .../commands/jira_status_reconciliation.py | 7 +-- .../commands/stamp_finding_last_reviewed.py | 14 +++--- dojo/reports/widgets.py | 10 ++-- dojo/templatetags/display_tags.py | 6 ++- dojo/tools/dependency_track/parser.py | 8 +-- dojo/tools/noseyparker/parser.py | 50 +++++++++++-------- ruff.toml | 2 +- 9 files changed, 82 insertions(+), 55 deletions(-) diff --git a/dojo/endpoint/utils.py b/dojo/endpoint/utils.py index f5f01ee4e3..0fe6489cc6 100644 --- a/dojo/endpoint/utils.py +++ b/dojo/endpoint/utils.py @@ -105,8 +105,10 @@ def err_log(message, html_log, endpoint_html_log, endpoint): if parts.protocol: if endpoint.protocol and (endpoint.protocol != parts.protocol): - message = f"has defined protocol ({endpoint.protocol}) and it is not the same as protocol in host " \ - f"({parts.protocol})" + message = ( + f"has defined protocol ({endpoint.protocol}) and it is not the same as protocol in host " + f"({parts.protocol})" + ) err_log(message, html_log, endpoint_html_log, endpoint) else: if change: @@ -126,8 +128,10 @@ def err_log(message, html_log, endpoint_html_log, endpoint): if parts.port: try: if (endpoint.port is not None) and (int(endpoint.port) != parts.port): - message = f"has defined port number ({endpoint.port}) and it is not the same as port number in " \ - f"host ({parts.port})" + message = ( + f"has defined port number ({endpoint.port}) and it is not the same as port number in " + f"host ({parts.port})" + ) err_log(message, html_log, endpoint_html_log, endpoint) else: if change: @@ -138,8 +142,10 @@ def err_log(message, html_log, endpoint_html_log, endpoint): if parts.path: if endpoint.path and (endpoint.path != parts.path): - message = f"has defined path ({endpoint.path}) and it is not the same as path in host " \ - f"({parts.path})" + message = ( + f"has defined path ({endpoint.path}) and it is not the same as path in host " + f"({parts.path})" + ) err_log(message, html_log, endpoint_html_log, endpoint) else: if change: @@ -147,8 +153,10 @@ def err_log(message, html_log, endpoint_html_log, endpoint): if parts.query: if endpoint.query and (endpoint.query != parts.query): - message = f"has defined query ({endpoint.query}) and it is not the same as query in host " \ - f"({parts.query})" + message = ( + f"has defined query ({endpoint.query}) and it is not the same as query in host " + f"({parts.query})" + ) err_log(message, html_log, endpoint_html_log, endpoint) else: if change: @@ -156,8 +164,10 @@ def err_log(message, html_log, endpoint_html_log, endpoint): if parts.fragment: if endpoint.fragment and (endpoint.fragment != parts.fragment): - message = f"has defined fragment ({endpoint.fragment}) and it is not the same as fragment in host " \ - f"({parts.fragment})" + message = ( + f"has defined fragment ({endpoint.fragment}) and it is not the same as fragment in host " + f"({parts.fragment})" + ) err_log(message, html_log, endpoint_html_log, endpoint) else: if change: diff --git a/dojo/forms.py b/dojo/forms.py index b25d0a6d23..42b396d685 100644 --- a/dojo/forms.py +++ 
b/dojo/forms.py @@ -3063,9 +3063,10 @@ def __init__(self, *args, **kwargs): if self.push_all: # This will show the checkbox as checked and greyed out, this way the user is aware # that issues will be pushed to JIRA, given their product-level settings. - self.fields["push_to_jira"].help_text = \ - "Push all issues is enabled on this product. If you do not wish to push all issues" \ + self.fields["push_to_jira"].help_text = ( + "Push all issues is enabled on this product. If you do not wish to push all issues" " to JIRA, please disable Push all issues on this product." + ) self.fields["push_to_jira"].widget.attrs["checked"] = "checked" self.fields["push_to_jira"].disabled = True @@ -3166,9 +3167,10 @@ def __init__(self, *args, **kwargs): if self.push_all: # This will show the checkbox as checked and greyed out, this way the user is aware # that issues will be pushed to JIRA, given their product-level settings. - self.fields["push_to_jira"].help_text = \ - "Push all issues is enabled on this product. If you do not wish to push all issues" \ + self.fields["push_to_jira"].help_text = ( + "Push all issues is enabled on this product. If you do not wish to push all issues" " to JIRA, please disable Push all issues on this product." + ) self.fields["push_to_jira"].widget.attrs["checked"] = "checked" self.fields["push_to_jira"].disabled = True diff --git a/dojo/management/commands/jira_status_reconciliation.py b/dojo/management/commands/jira_status_reconciliation.py index 868f4492c4..e04da50756 100644 --- a/dojo/management/commands/jira_status_reconciliation.py +++ b/dojo/management/commands/jira_status_reconciliation.py @@ -204,10 +204,11 @@ class Command(BaseCommand): help = "Reconcile finding status with JIRA issue status, stdout will contain semicolon seperated CSV results. \ Risk Accepted findings are skipped. Findings created before 1.14.0 are skipped." - mode_help = \ - "- reconcile: (default)reconcile any differences in status between Defect Dojo and JIRA, will look at the latest status change timestamp in both systems to determine which one is the correct status" \ - "- push_status_to_jira: update JIRA status for all JIRA issues connected to a Defect Dojo finding (will not push summary/description, only status)" \ + mode_help = ( + "- reconcile: (default)reconcile any differences in status between Defect Dojo and JIRA, will look at the latest status change timestamp in both systems to determine which one is the correct status" + "- push_status_to_jira: update JIRA status for all JIRA issues connected to a Defect Dojo finding (will not push summary/description, only status)" "- import_status_from_jira: update Defect Dojo finding status from JIRA" + ) def add_arguments(self, parser): parser.add_argument("--mode", help=self.mode_help) diff --git a/dojo/management/commands/stamp_finding_last_reviewed.py b/dojo/management/commands/stamp_finding_last_reviewed.py index 24567a971b..7874f25160 100644 --- a/dojo/management/commands/stamp_finding_last_reviewed.py +++ b/dojo/management/commands/stamp_finding_last_reviewed.py @@ -24,12 +24,14 @@ class Command(BaseCommand): - help = "A new field last_reviewed has been added to the Finding model \n" \ - "This script will update all findings with a last_reviewed date of the most current date from: \n" \ - "1. Finding Date if no other evidence of activity is found \n" \ - "2. Last note added date if a note is found \n" \ - "3. Mitigation Date if finding is mitigated \n" \ - "4. 
Last action_log entry date if Finding has been updated \n" + help = ( + "A new field last_reviewed has been added to the Finding model \n" + "This script will update all findings with a last_reviewed date of the most current date from: \n" + "1. Finding Date if no other evidence of activity is found \n" + "2. Last note added date if a note is found \n" + "3. Mitigation Date if finding is mitigated \n" + "4. Last action_log entry date if Finding has been updated \n" + ) def handle(self, *args, **options): findings = Finding.objects.all().order_by("id") diff --git a/dojo/reports/widgets.py b/dojo/reports/widgets.py index e69ba6a187..f8183b3a97 100644 --- a/dojo/reports/widgets.py +++ b/dojo/reports/widgets.py @@ -269,8 +269,9 @@ def __init__(self, *args, **kwargs): self.form = None self.multiple = "true" self.widget_class = "finding-list" - self.extra_help = "You can use this form to filter findings and select only the ones to be included in the " \ - "report." + self.extra_help = ( + "You can use this form to filter findings and select only the ones to be included in the report." + ) self.title_words = get_words_for_field(Finding, "title") self.component_words = get_words_for_field(Finding, "component_name") @@ -339,8 +340,9 @@ def __init__(self, *args, **kwargs): else: self.paged_endpoints = self.endpoints self.multiple = "true" - self.extra_help = "You can use this form to filter endpoints and select only the ones to be included in the " \ - "report." + self.extra_help = ( + "You can use this form to filter endpoints and select only the ones to be included in the report." + ) def get_html(self): html = render_to_string("dojo/custom_html_report_endpoint_list.html", diff --git a/dojo/templatetags/display_tags.py b/dojo/templatetags/display_tags.py index 95156c1ff9..350b2b8196 100644 --- a/dojo/templatetags/display_tags.py +++ b/dojo/templatetags/display_tags.py @@ -285,8 +285,10 @@ def finding_sla(finding): sla_age) + " days or less since " + finding.get_sla_start_date().strftime("%b %d, %Y") if find_sla is not None: - title = '' \ - '' + str(find_sla) + "" + title = ( + f'' + f'{find_sla}' + ) return mark_safe(title) diff --git a/dojo/tools/dependency_track/parser.py b/dojo/tools/dependency_track/parser.py index 483e05e87c..9890b58501 100644 --- a/dojo/tools/dependency_track/parser.py +++ b/dojo/tools/dependency_track/parser.py @@ -172,9 +172,11 @@ def _convert_dependency_track_finding_to_dojo_finding(self, dependency_track_fin component_description = f"Version {component_version} of the {component_name} component" else: component_description = f"The {component_name} component" - vulnerability_description = "You are using a component with a known vulnerability. " \ - f"{component_description} is affected by the vulnerability with an id of {vuln_id} as " \ - f"identified by {source}." + vulnerability_description = ( + "You are using a component with a known vulnerability. " + f"{component_description} is affected by the vulnerability with an id of {vuln_id} as " + f"identified by {source}." 
+ ) # Append purl info if it is present if "purl" in dependency_track_finding["component"] and dependency_track_finding["component"]["purl"] is not None: component_purl = dependency_track_finding["component"]["purl"] diff --git a/dojo/tools/noseyparker/parser.py b/dojo/tools/noseyparker/parser.py index 05fcdab49d..88e33d4dbb 100644 --- a/dojo/tools/noseyparker/parser.py +++ b/dojo/tools/noseyparker/parser.py @@ -16,8 +16,10 @@ def get_label_for_scan_types(self, scan_type): return "Nosey Parker Scan" def get_description_for_scan_types(self, scan_type): - return "Nosey Parker report file can be imported in JSON Lines format (option --jsonl). " \ - "Supports v0.16.0 and v0.22.0 of https://github.com/praetorian-inc/noseyparker" + return ( + "Nosey Parker report file can be imported in JSON Lines format (option --jsonl). " + "Supports v0.16.0 and v0.22.0 of https://github.com/praetorian-inc/noseyparker" + ) def get_findings(self, file, test): """ @@ -61,14 +63,15 @@ def version_0_16_0(self, line, test): title = f"Secret(s) Found in Repository with Commit ID {json_path['commit_provenance']['commit_metadata']['commit_id']}" filepath = json_path["commit_provenance"]["blob_path"] line_num = match["location"]["source_span"]["start"]["line"] - description = f"Secret found of type: {rule_name} \n" \ - f"SECRET starts with: '{secret[:3]}' \n" \ - f"Committer Name: {json_path['commit_provenance']['commit_metadata']['committer_name']} \n" \ - f"Committer Email: {json_path['commit_provenance']['commit_metadata']['committer_email']} \n" \ - f"Commit ID: {json_path['commit_provenance']['commit_metadata']['commit_id']} \n" \ - f"Location: {filepath} line #{line_num} \n" \ - f"Line #{line_num} \n" - + description = ( + f"Secret found of type: {rule_name} \n" + f"SECRET starts with: '{secret[:3]}' \n" + f"Committer Name: {json_path['commit_provenance']['commit_metadata']['committer_name']} \n" + f"Committer Email: {json_path['commit_provenance']['commit_metadata']['committer_email']} \n" + f"Commit ID: {json_path['commit_provenance']['commit_metadata']['commit_id']} \n" + f"Location: {filepath} line #{line_num} \n" + f"Line #{line_num} \n" + ) # Internal de-duplication key = hashlib.md5((filepath + "|" + secret + "|" + str(line_num)).encode("utf-8")).hexdigest() @@ -112,22 +115,25 @@ def version_0_22_0(self, line, test): if json_path.get("first_commit"): title = f"Secret(s) Found in Repository with Commit ID {json_path['first_commit']['commit_metadata']['commit_id']}" filepath = json_path["first_commit"]["blob_path"] - description = f"Secret found of type: {rule_name} \n" \ - f"SECRET starts with: '{rule_text_id[:3]}' \n" \ - f"Committer Name: {json_path['first_commit']['commit_metadata']['committer_name']} \n" \ - f"Committer Email: {json_path['first_commit']['commit_metadata']['committer_email']} \n" \ - f"Commit ID: {json_path['first_commit']['commit_metadata']['commit_id']} \n" \ - f"Location: {filepath} line #{line_num} \n" \ - f"Line #{line_num} \n" + description = ( + f"Secret found of type: {rule_name} \n" + f"SECRET starts with: '{rule_text_id[:3]}' \n" + f"Committer Name: {json_path['first_commit']['commit_metadata']['committer_name']} \n" + f"Committer Email: {json_path['first_commit']['commit_metadata']['committer_email']} \n" + f"Commit ID: {json_path['first_commit']['commit_metadata']['commit_id']} \n" + f"Location: {filepath} line #{line_num} \n" + f"Line #{line_num} \n" + ) # scanned wihout git history else: title = "Secret(s) Found in Repository" filepath = json_path["path"] - description = 
f"Secret found of type: {rule_name} \n" \ - f"SECRET starts with: '{rule_text_id[:3]}' \n" \ - f"Location: {filepath} line #{line_num} \n" \ - f"Line #{line_num} \n" - + description = ( + f"Secret found of type: {rule_name} \n" + f"SECRET starts with: '{rule_text_id[:3]}' \n" + f"Location: {filepath} line #{line_num} \n" + f"Line #{line_num} \n" + ) # Internal de-duplication key = hashlib.md5((filepath + "|" + rule_text_id + "|" + str(line_num)).encode("utf-8")).hexdigest() diff --git a/ruff.toml b/ruff.toml index 4eed27e06f..a1e29c87d7 100644 --- a/ruff.toml +++ b/ruff.toml @@ -52,7 +52,7 @@ select = [ "EM", "EXE", "FA", - "ISC001", + "ISC001", "ISC002", "ICN", "LOG", "G001", "G002", "G01", "G1", "G2", From e426aed347179e7f1b90454fa5972e0c58fc2ad2 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:30:22 +0100 Subject: [PATCH 22/23] Ruff: Add PLC18 and fix PLC1802 (#11707) --- dojo/models.py | 2 +- ruff.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dojo/models.py b/dojo/models.py index 4901df7fde..e92f9da3f7 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -1033,7 +1033,7 @@ def save(self, *args, **kwargs): if (initial_sla_config.low != self.low) or (initial_sla_config.enforce_low != self.enforce_low): severities.append("Low") # if severities have changed, update finding sla expiration dates with those severities - if len(severities): + if severities: # set the async updating flag to true for this sla config self.async_updating = True super().save(*args, **kwargs) diff --git a/ruff.toml b/ruff.toml index a1e29c87d7..ef176d27a7 100644 --- a/ruff.toml +++ b/ruff.toml @@ -73,7 +73,7 @@ select = [ "FIX001", "FIX003", "PD", "PGH", - "PLC01", "PLC0205", "PLC0208", "PLC0414", "PLC24", "PLC3", + "PLC01", "PLC0205", "PLC0208", "PLC0414", "PLC18", "PLC24", "PLC3", "PLE", "PLR01", "PLR0203", "PLR0206", "PLR0915", "PLR1716", "PLR172", "PLR1733", "PLR1736", "PLW0120", "PLW0127", "PLW0129", "PLW013", "PLW017", "PLW02", "PLW04", "PLW07", "PLW1", "PLW2", "PLW3", From 60816ab2d5519e173afbb8e42c60ed0a12b0e6ed Mon Sep 17 00:00:00 2001 From: valentijnscholten Date: Thu, 6 Feb 2025 19:08:29 +0100 Subject: [PATCH 23/23] Support builds for different architectures including arm64 (#11673) * NodeJS: Correct Debian bullseye reference * support multiarch builds via index digest --------- Co-authored-by: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> --- Dockerfile.django-alpine | 2 +- Dockerfile.django-debian | 2 +- Dockerfile.integration-tests-debian | 1 + Dockerfile.nginx-alpine | 97 +++-------------------------- Dockerfile.nginx-debian | 2 +- 5 files changed, 11 insertions(+), 93 deletions(-) diff --git a/Dockerfile.django-alpine b/Dockerfile.django-alpine index cb1d832998..def1a6f01b 100644 --- a/Dockerfile.django-alpine +++ b/Dockerfile.django-alpine @@ -5,7 +5,7 @@ # Dockerfile.nginx to use the caching mechanism of Docker. # Ref: https://devguide.python.org/#branchstatus -FROM python:3.11.9-alpine3.20@sha256:df44c0c0761ddbd6388f4549cab42d24d64d257c2a960ad5b276bb7dab9639c7 AS base +FROM python:3.11.9-alpine3.20@sha256:f9ce6fe33d9a5499e35c976df16d24ae80f6ef0a28be5433140236c2ca482686 AS base FROM base AS build WORKDIR /app RUN \ diff --git a/Dockerfile.django-debian b/Dockerfile.django-debian index 663a75e884..2f9e62c1ac 100644 --- a/Dockerfile.django-debian +++ b/Dockerfile.django-debian @@ -5,7 +5,7 @@ # Dockerfile.nginx to use the caching mechanism of Docker. 
# Ref: https://devguide.python.org/#branchstatus -FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e AS base +FROM python:3.11.9-slim-bookworm@sha256:6ed5bff4d7d377e2a27d9285553b8c21cfccc4f00881de1b24c9bc8d90016e82 AS base FROM base AS build WORKDIR /app RUN \ diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index 624b9ef790..357f1e9c96 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -2,6 +2,7 @@ # code: language=Dockerfile FROM openapitools/openapi-generator-cli:v7.11.0@sha256:a9e7091ac8808c6835cf8ec88252bca603f1f889ef1456b63d8add5781feeca7 AS openapitools +# currently only supports x64, no arm yet due to chrome and selenium dependencies FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e AS build WORKDIR /app RUN \ diff --git a/Dockerfile.nginx-alpine b/Dockerfile.nginx-alpine index e1b839e48f..38cf2fcb44 100644 --- a/Dockerfile.nginx-alpine +++ b/Dockerfile.nginx-alpine @@ -5,7 +5,10 @@ # Dockerfile.django-alpine to use the caching mechanism of Docker. # Ref: https://devguide.python.org/#branchstatus -FROM python:3.11.9-alpine3.20@sha256:df44c0c0761ddbd6388f4549cab42d24d64d257c2a960ad5b276bb7dab9639c7 AS base +FROM node:20.18.2-alpine3.20@sha256:40cbd847a2db119ba716f2861b04076bcb575a0051923459271a18ff80944f7f AS node + +FROM python:3.11.9-alpine3.20@sha256:f9ce6fe33d9a5499e35c976df16d24ae80f6ef0a28be5433140236c2ca482686 AS base + FROM base AS build WORKDIR /app RUN \ @@ -31,97 +34,11 @@ COPY requirements.txt ./ # https://github.com/unbit/uwsgi/issues/1318#issuecomment-542238096 RUN CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements.txt -FROM build AS collectstatic - -# Node installation from https://github.com/nodejs/docker-node -ENV NODE_VERSION=20.11.0 - -RUN addgroup -g 1000 node \ - && adduser -u 1000 -G node -s /bin/sh -D node \ - && apk add --no-cache \ - libstdc++ \ - && apk add --no-cache --virtual .build-deps \ - curl \ - && ARCH= && alpineArch="$(apk --print-arch)" \ - && case "${alpineArch##*-}" in \ - x86_64) \ - ARCH='x64' \ - CHECKSUM=$(curl -sSL --compressed "https://unofficial-builds.nodejs.org/download/release/v${NODE_VERSION}/SHASUMS256.txt" | grep "node-v${NODE_VERSION}-linux-x64-musl.tar.xz" | cut -d' ' -f1) \ - ;; \ - *) ;; \ - esac \ - && if [ -n "${CHECKSUM}" ]; then \ - set -eu; \ - curl -fsSLO --compressed "https://unofficial-builds.nodejs.org/download/release/v$NODE_VERSION/node-v$NODE_VERSION-linux-$ARCH-musl.tar.xz"; \ - echo "$CHECKSUM node-v$NODE_VERSION-linux-$ARCH-musl.tar.xz" | sha256sum -c - \ - && tar -xJf "node-v$NODE_VERSION-linux-$ARCH-musl.tar.xz" -C /usr/local --strip-components=1 --no-same-owner \ - && ln -s /usr/local/bin/node /usr/local/bin/nodejs; \ - else \ - echo "Building from source" \ - # backup build - && apk add --no-cache --virtual .build-deps-full \ - binutils-gold \ - g++ \ - gcc \ - gnupg \ - libgcc \ - linux-headers \ - make \ - python3 \ - # gpg keys listed at https://github.com/nodejs/node#release-keys - && for key in \ - C0D6248439F1D5604AAFFB4021D900FFDB233756 \ - DD792F5973C6DE52C432CBDAC77ABFA00DDBF2B7 \ - CC68F5A3106FF448322E48ED27F5E38D5B0A215F \ - 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 \ - 890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4 \ - C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C \ - 108F52B48DB57BB0CC439B2997B01419BD92F80A \ - A363A499291CBBC940DD62E41F10027AF002F8B0 \ - ; do \ - gpg --batch --keyserver 
hkps://keys.openpgp.org --recv-keys "$key" || \ - gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \ - done \ - && curl -fsSLO --compressed "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION.tar.xz" \ - && curl -fsSLO --compressed "https://nodejs.org/dist/v$NODE_VERSION/SHASUMS256.txt.asc" \ - && gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \ - && grep " node-v$NODE_VERSION.tar.xz\$" SHASUMS256.txt | sha256sum -c - \ - && tar -xf "node-v$NODE_VERSION.tar.xz" \ - && cd "node-v$NODE_VERSION" \ - && ./configure \ - && make -j$(getconf _NPROCESSORS_ONLN) V= \ - && make install \ - && apk del .build-deps-full \ - && cd .. \ - && rm -Rf "node-v$NODE_VERSION" \ - && rm "node-v$NODE_VERSION.tar.xz" SHASUMS256.txt.asc SHASUMS256.txt; \ - fi \ - && rm -f "node-v$NODE_VERSION-linux-$ARCH-musl.tar.xz" \ - && apk del .build-deps \ - # smoke tests - && node --version \ - && npm --version -ENV YARN_VERSION=1.22.19 +FROM build AS collectstatic +RUN apk add nodejs npm +RUN npm install -g yarn --force -RUN apk add --no-cache --virtual .build-deps-yarn curl gnupg tar \ - && for key in \ - 6A010C5166006599AA17F08146C2130DFD2497F5 \ - ; do \ - gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" || \ - gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \ - done \ - && curl -fsSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ - && curl -fsSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ - && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ - && mkdir -p /opt \ - && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/ \ - && ln -s /opt/yarn-v$YARN_VERSION/bin/yarn /usr/local/bin/yarn \ - && ln -s /opt/yarn-v$YARN_VERSION/bin/yarnpkg /usr/local/bin/yarnpkg \ - && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ - && apk del .build-deps-yarn \ - # smoke test - && yarn --version # installing DefectDojo packages RUN pip3 install \ diff --git a/Dockerfile.nginx-debian b/Dockerfile.nginx-debian index 7297695b57..806d45e572 100644 --- a/Dockerfile.nginx-debian +++ b/Dockerfile.nginx-debian @@ -5,7 +5,7 @@ # Dockerfile.django-debian to use the caching mechanism of Docker. # Ref: https://devguide.python.org/#branchstatus -FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e AS base +FROM python:3.11.9-slim-bookworm@sha256:6ed5bff4d7d377e2a27d9285553b8c21cfccc4f00881de1b24c9bc8d90016e82 AS base FROM base AS build WORKDIR /app RUN \
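The thrust of this last commit: each base image is repinned from a single-architecture manifest digest to the digest of the multi-arch image index, so the same FROM line resolves correctly on both amd64 and arm64, and the long hand-rolled Node.js/Yarn source build deleted above is replaced with the Alpine packages (apk add nodejs npm, then a global yarn via npm). The integration-tests image stays x64-only for now, per the comment added to it. When repinning this way, the index digest can be read with buildx; a sketch, assuming a Docker installation with the buildx plugin available:

    # Prints the manifest list (index) digest for the tag, plus the
    # per-architecture manifests it points at; the top-level digest is
    # the one to use in FROM python:3.11.9-slim-bookworm@sha256:...
    docker buildx imagetools inspect python:3.11.9-slim-bookworm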